diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ + diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 6ec97c9ef..b918352d7 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -21,4 +21,4 @@ All submissions, including submissions by project members, require review. We use GitHub pull requests for this purpose. Consult [GitHub Help] for more information on using pull requests. -[GitHub Help]: https://help.github.com/articles/about-pull-requests/ \ No newline at end of file +[github help]: https://help.github.com/articles/about-pull-requests/ diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 89a43f576..000000000 --- a/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,69 +0,0 @@ - - - -### Version info - - - -**firebase-functions:** - -**firebase-tools:** - -**firebase-admin:** - -### Test case - - - - -### Steps to reproduce - - - - -### Were you able to successfully deploy your functions? - - - - -### Expected behavior - - - - -### Actual behavior - - diff --git a/.github/ISSUE_TEMPLATE/---report-a-bug.md b/.github/ISSUE_TEMPLATE/---report-a-bug.md new file mode 100644 index 000000000..abffad1b7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/---report-a-bug.md @@ -0,0 +1,54 @@ +--- +name: "⚠️ Report a Bug" +about: Think you found a bug in the firebase-functions SDK? Report it here. Please do not use this form if your function is deployed successfully but not working as you expected. +title: "" +labels: "" +assignees: "" +--- + + + +### Related issues + + + +### [REQUIRED] Version info + + + +**node:** + + + +**firebase-functions:** + +**firebase-tools:** + + + +**firebase-admin:** + +### [REQUIRED] Test case + + + +### [REQUIRED] Steps to reproduce + + + +### [REQUIRED] Expected behavior + + + +### [REQUIRED] Actual behavior + + + +### Were you able to successfully deploy your functions? + + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..918e205f9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: false +contact_links: + - name: 💻 Bug in the Firebase CLI + url: https://github.com/firebase/firebase-tools/issues/new/choose + about: Have you found a bug in the Firebase CLI? + - name: 🔥 Firebase Support + url: https://firebase.google.com/support/ + about: If you have an issue with your functions in production, please contact support. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 9ff31d201..87c5bdec6 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -5,8 +5,8 @@ and make note of the following: Run the linter and test suite ============================== -Run `npm test` to make sure your changes compile properly and the tests all pass on your local machine. -We've hooked up this repo with continuous integration to double check those things for you. +Run `npm test` to make sure your changes compile properly and the tests all pass on your local machine. +We've hooked up this repo with continuous integration to double check those things for you. Add tests (if applicable) ============================== @@ -20,7 +20,6 @@ before sending PRs. We cannot accept code without this. 
--> - ### Description \ No newline at end of file + diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml new file mode 100644 index 000000000..d0e9b406b --- /dev/null +++ b/.github/workflows/docs.yaml @@ -0,0 +1,33 @@ +name: Docgen + +on: + push: + branches: + - master + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: "24" + - name: Cache npm + uses: actions/cache@v4 + with: + path: ~/.npm + key: ${{ runner.os }}-docgen-${{ hashFiles('**/package-lock.json') }} + - name: Install dependencies + run: npm ci + - name: Generate Reference Docs + run: | + npm run docgen:v1 + npm run docgen:v2 + - uses: actions/upload-artifact@v4 + name: Upload Docs Preview + with: + name: reference-docs + path: | + ./docgen/v1/markdown/ + ./docgen/v2/markdown/ diff --git a/.github/workflows/postmerge.yaml b/.github/workflows/postmerge.yaml new file mode 100644 index 000000000..e82d70d65 --- /dev/null +++ b/.github/workflows/postmerge.yaml @@ -0,0 +1,51 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +name: Post-merge tests + +on: + workflow_dispatch: + +concurrency: + group: postmerge-${{ github.ref }} + cancel-in-progress: true + +env: + CI: true + +jobs: + postmerge: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 24 + + - uses: google-github-actions/auth@v0 + with: + credentials_json: "${{ secrets.CF3_INTEGRATION_TEST_GOOGLE_CREDENTIALS }}" + create_credentials_file: true + + - name: "Set up Cloud SDK" + uses: google-github-actions/setup-gcloud@v0 + + - name: "Setup Firebase CLI" + run: npm i -g firebase-tools + + - name: "Run integration test" + run: npm run test:postmerge + + - name: Print debug logs + if: failure() + run: find . 
-type f -name "*debug.log" | xargs cat diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 000000000..2b2e666a0 --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,112 @@ +name: CI Tests + +on: + - pull_request + - push + +permissions: + contents: read + +env: + CI: true + +jobs: + lint: + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + strategy: + matrix: + node-version: + - 22.x + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: npm + - run: npm ci + - run: npm run lint + + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 22.x + cache: npm + - run: npm ci + - run: npm run build + - run: npm pack + - uses: actions/upload-artifact@v4 + with: + name: lib + path: lib/ + - uses: actions/upload-artifact@v4 + with: + name: tarball + path: firebase-functions-*.tgz + + unit: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - 18.x + - 20.x + - 22.x + - 24.x + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: npm + - run: npm ci + - run: npm run test + + integration: + needs: build + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - 18.x + - 20.x + - 22.x + - 24.x + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: npm + - run: npm ci + - uses: actions/download-artifact@v4 + with: + name: lib + path: lib + - run: npm run test:bin + env: + SKIP_BUILD: true + + packaging: + needs: build + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - 18.x + - 20.x + - 22.x + - 24.x + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + - uses: actions/download-artifact@v4 + with: + name: tarball + path: . + - run: chmod +x ./scripts/test-packaging.sh && ./scripts/test-packaging.sh firebase-functions-*.tgz diff --git a/.gitignore b/.gitignore index 543c4a073..017bc9f40 100644 --- a/.gitignore +++ b/.gitignore @@ -3,12 +3,20 @@ .tmp .vscode/ coverage +dist/ +docgen/html +docgen/*/temp +docgen/*/markdown +docgen/*/*.json +docgen/*/*.md firebase-functions-*.tgz integration_test/.firebaserc integration_test/*.log integration_test/functions/firebase-functions.tgz +integration_test/functions/package.json lib node_modules npm-debug.log typings yarn.lock +.DS_Store diff --git a/.guides/config.json b/.guides/config.json new file mode 100644 index 000000000..c67d63445 --- /dev/null +++ b/.guides/config.json @@ -0,0 +1,9 @@ +{ + "description": "Use this library to build serverless functions for event triggers and HTTP using Cloud Functions for Firebase", + "mcpServers": { + "firebase": { + "command": "npx", + "args": ["-y", "firebase-tools@latest", "experimental:mcp"] + } + } +} diff --git a/.guides/setup.md b/.guides/setup.md new file mode 100644 index 000000000..2a6d54115 --- /dev/null +++ b/.guides/setup.md @@ -0,0 +1,69 @@ +# Cloud Functions for Firebase setup guide + +This guide provides a step-by-step process for setting up Cloud Functions, tailored for coding agents. + +## 1. Create a 2nd-gen HTTP function + +Replace the contents of `src/index.ts` (or `index.js`) with the following code to create a simple, modern v2 HTTP endpoint. 
+ +```typescript +import { onRequest } from "firebase-functions/https"; +import { onDocumentCreated } from "firebase-functions/firestore"; +import * as logger from "firebase-functions/logger"; +import { defineString, defineInt } from "firebase-functions/params"; + +// Configurable parameters +const scaleLimit = defineInt("MAX_INSTANCES", { default: 1 }); +const greeting = defineString("GREETING", { default: "Hello" }); + +export const helloWorld = onRequest({ maxInstances: scaleLimit }, async (request, response) => { + logger.info("Request received!", request); + response.send(`${greeting.value()} from Firebase!`); +}); + +export const newDoc = onDocumentCreated( + { document: "/words/{wordId}", maxInstances: scaleLimit }, + async (event) => { + logger.info("New word!", event.data?.data()); + } +); +``` + +**Key points for the agent:** + +- Use `logger` for structured logging. +- Use `defineString`, `defineInt`, `defineSecret` for environment configuration instead of `functions.config()`. +- Recommend a `maxInstances` setting of 1 for all functions until correct behavior can be verified. + +## 2. Build TypeScript + +Compile your TypeScript code to JavaScript. + +```bash +npm run build +``` + +## 3. Local Development and Testing + +Use the Firebase Emulators to test your function locally before deploying. + +A human should run the following command in a separate terminal window to start the emulators: + +```bash +# Start the functions emulator +firebase emulators:start --only functions +``` + +A human can then interact with the function at the local URL provided by the emulator. + +## 4. Deploy to Firebase + +Once testing is complete, deploy the function to your Firebase project. + +```bash +# Deploy only the functions +firebase deploy --only functions +``` + +The agent will be prompted to set any parameters defined with `defineString` or other `define` functions that do not have a default value. diff --git a/.guides/upgrade.md b/.guides/upgrade.md new file mode 100644 index 000000000..93dd92cc1 --- /dev/null +++ b/.guides/upgrade.md @@ -0,0 +1,178 @@ +# Upgrading a 1st-gen function to 2nd gen + +This guide provides a step-by-step process for migrating a single Cloud Function from 1st to 2nd generation. Migrate functions one-by-one. Run both generations side-by-side before deleting the 1st gen function. + +## 1. Identify a 1st-gen function to upgrade + +Find all 1st-gen functions in the directory. 1st-gen functions use a namespaced API like this: + +**Before (1st Gen):** + +```typescript +import * as functions from "firebase-functions"; + +export const webhook = functions.https.onRequest((request, response) => { + // ... +}); +``` + +Sometimes, they'll explicitly import from the `firebase-functions/v1` subpackage, but not always. + +Ask the human to pick a **single** function to upgrade from the list of 1st gen functions you found. + +## 2. Update Dependencies + +Ensure your `firebase-functions` and `firebase-admin` SDKs are up-to-date, and you are using a recent version of the Firebase CLI. + +## 3. Modify Imports + +Update your import statements to use the top-level modules. + +**After (2nd Gen):** + +```typescript +import { onRequest } from "firebase-functions/https"; +``` + +## 4. Update Trigger Definition + +The SDK is now more modular. Update your trigger definition accordingly. + +**After (2nd Gen):** + +```typescript +export const webhook = onRequest((request, response) => { + // ...
+}); +``` + +Here are other examples of trigger changes: + +### Callable Triggers + +**Before (1st Gen):** + +```typescript +export const getprofile = functions.https.onCall((data, context) => { + // ... +}); +``` + +**After (2nd Gen):** + +```typescript +import { onCall } from "firebase-functions/https"; + +export const getprofile = onCall((request) => { + // ... +}); +``` + +### Background Triggers (Pub/Sub) + +**Before (1st Gen):** + +```typescript +export const hellopubsub = functions.pubsub.topic("topic-name").onPublish((message) => { + // ... +}); +``` + +**After (2nd Gen):** + +```typescript +import { onMessagePublished } from "firebase-functions/pubsub"; + +export const hellopubsub = onMessagePublished("topic-name", (event) => { + // ... +}); +``` + +## 5. Use Parameterized Configuration + +Migrate from `functions.config()` to the new `params` module for environment configuration. + +**Before (`.runtimeconfig.json`):** + +```json +{ + "someservice": { + "key": "somesecret" + } +} +``` + +**And in code (1st Gen):** + +```typescript +const SKEY = functions.config().someservice.key; +``` + +**After (2nd Gen):** +Define params in your code and set their values during deployment. + +**In `index.ts`:** + +```typescript +import { defineString } from "firebase-functions/params"; + +const SOMESERVICE_KEY = defineString("SOMESERVICE_KEY"); +``` + +Use `SOMESERVICE_KEY.value()` to access the value. For secrets like API keys, use `defineSecret`. + +**In `index.ts`:** + +```typescript +import { defineSecret } from "firebase-functions/params"; + +const SOMESERVICE_KEY = defineSecret("SOMESERVICE_KEY"); +``` + +The human will be prompted to set the value on deployment. The value will be stored securely in Cloud Secret Manager. + +## 6. Update Runtime Options + +Runtime options are now set directly within the function definition. + +**Before (1st Gen):** + +```typescript +export const func = functions + .runWith({ + // Keep 5 instances warm + minInstances: 5, + }) + .https.onRequest((request, response) => { + // ... + }); +``` + +**After (2nd Gen):** + +```typescript +import { onRequest } from "firebase-functions/https"; + +export const func = onRequest( + { + // Keep 5 instances warm + minInstances: 5, + }, + (request, response) => { + // ... + } +); +``` + +## 7. Traffic Migration + +A human should follow these steps to migrate safely: + +> To migrate traffic safely: +> +> 1. Rename your new 2nd gen function with a different name. +> 2. Comment out any existing `minInstances` or `maxInstances` config in the new 2nd gen function and instead set `maxInstances` to `1` while testing. +> 3. Deploy it alongside the old 1st gen function. +> 4. Gradually introduce traffic to the new function (e.g., via client-side changes or by calling it from the 1st gen function). +> 5. As traffic ramps up to the new 2nd gen function, scale it up by adding back the original `minInstances` and `maxInstances` settings to the 2nd gen function. Reduce the `minInstances` and `maxInstances` settings for the 1st gen function as traffic decreases. +> 6. The 1st gen function can be deleted once it has stopped receiving traffic. diff --git a/.guides/usage.md b/.guides/usage.md new file mode 100644 index 000000000..f110b6f7c --- /dev/null +++ b/.guides/usage.md @@ -0,0 +1,116 @@ +## Key Guidelines + +- Always use 2nd-gen functions for new development. +- Use 1st-gen functions _only_ for Analytics and basic Auth triggers, since those aren't supported by 2nd gen. 
+- Use `firebase-functions` SDK version 6.0.0 and above. +- Use top-level imports (e.g., `firebase-functions/https`). These are 2nd gen by default. If 1st gen is required (Analytics or basic Auth triggers), import from the `firebase-functions/v1` import path. + +## Configuration: Use Secret Params for API Keys + +For sensitive information like API keys (e.g., for LLMs, payment providers, etc.), **always** use `defineSecret`. This stores the value securely in Cloud Secret Manager. + +```typescript +import { onRequest } from "firebase-functions/https"; +import * as logger from "firebase-functions/logger"; +import { defineString, defineSecret } from "firebase-functions/params"; + +// Securely define an LLM API key +const LLM_API_KEY = defineSecret("LLM_API_KEY"); + +// Example function that uses the secret +export const callLlm = onRequest({ secrets: [LLM_API_KEY] }, async (req, res) => { + const apiKey = LLM_API_KEY.value(); + + // Use the apiKey to make a call to the LLM service + logger.info("Calling LLM with API key."); + + // insert code here to call LLM... + + res.send("LLM API call initiated."); +}); +``` + +The CLI will prompt for the secret's value at deploy time. Alternatively, a human can set the secret using the Firebase CLI command: + +```bash +firebase functions:secrets:set SECRET_NAME +``` + +If you see an API key being accessed with `functions.config` in existing functions code, offer to upgrade to params. + +## Use the Firebase Admin SDK + +To interact with Firebase services like Firestore, Auth, or RTDB from within your functions, you need to initialize the Firebase Admin SDK. Call `initializeApp` without any arguments so that Application Default Credentials are used. + +1. **Install the SDK:** + + ```bash + npm i firebase-admin + ``` + +2. **Initialize in your code:** + + ```typescript + import * as admin from "firebase-admin"; + import { onInit } from "firebase-functions"; + + onInit(() => { + admin.initializeApp(); + }); + ``` + + This should be done once at the top level of your `index.ts` file. + +## Common Imports + +```typescript +import { onRequest, onCall, onCallGenkit } from "firebase-functions/https"; +import { onDocumentUpdated } from "firebase-functions/firestore"; +import { onNewFatalIssuePublished } from "firebase-functions/alerts/crashlytics"; +import { onValueWritten } from "firebase-functions/database"; +import { onSchedule } from "firebase-functions/scheduler"; +import { onTaskDispatched } from "firebase-functions/tasks"; +import { onObjectFinalized } from "firebase-functions/storage"; +import { onMessagePublished } from "firebase-functions/pubsub"; +import { beforeUserSignedIn } from "firebase-functions/identity"; +import { onTestMatrixCompleted } from "firebase-functions/testLab"; +import { logger, onInit } from "firebase-functions"; +import { defineString, defineSecret } from "firebase-functions/params"; +``` + +A human can find code samples for these triggers in the [functions-samples repository](https://github.com/firebase/functions-samples/tree/main/Node). + +## 1st-gen Functions (Legacy Triggers) + +Use the `firebase-functions/v1` import for Analytics and basic Auth triggers. These aren't supported in 2nd gen.
+ +```typescript +import * as functionsV1 from "firebase-functions/v1"; + +// v1 Analytics trigger +export const onPurchase = functionsV1.analytics.event("purchase").onLog(async (event) => { + logger.info("Purchase event", { value: event.params?.value }); +}); + +// v1 Auth trigger +export const onUserCreate = functionsV1.auth.user().onCreate(async (user) => { + logger.info("User created", { uid: user.uid }); +}); +``` + +## Development Commands + +```bash +# Install dependencies +npm install + +# Compile TypeScript +npm run build + +# Run emulators for local development +# This is a long-running command. A human can run this command themselves to start the emulators: +firebase emulators:start --only functions + +# Deploy functions +firebase deploy --only functions +``` diff --git a/.mocharc.yaml b/.mocharc.yaml new file mode 100644 index 000000000..a23d46ad0 --- /dev/null +++ b/.mocharc.yaml @@ -0,0 +1,8 @@ +exit: true +extension: + - ts +package: ./package.json +reporter: spec +require: + - "ts-node/register" + - "source-map-support/register" diff --git a/.npmignore b/.npmignore deleted file mode 100644 index 4472a5eff..000000000 --- a/.npmignore +++ /dev/null @@ -1,17 +0,0 @@ -.tmp -coverage -.vscode -.idea -tsconfig.* -tslint.* -.travis.yml -.github - -# Don't include the raw typescript -src -spec -integration_test -# TODO(rjh) add back once testing isn't just a joke -testing -lib/testing.* -*.tgz diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 43c97e719..000000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -package-lock=false diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..2011321b8 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,4 @@ +/node_modules +/lib/**/* +/CONTRIBUTING.md +/docgen \ No newline at end of file diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 000000000..d7a429f94 --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,3 @@ +module.exports = { + printWidth: 100, +}; \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 8f5bf9f17..000000000 --- a/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: node_js -node_js: -- '6.14.0' -- '8' -- stable -sudo: false diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..ecb712245 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,6 @@ +- BREAKING: Drop support for Node.js 16. Minimum supported version is now Node.js 18. (#1747) +- BREAKING: Remove deprecated `functions.config()` API. Use `params` module for environment variables instead. (#1748) +- BREAKING: Upgrade to TypeScript v5 and target ES2022. (#1746) +- BREAKING: Unhandled errors in async `onRequest` handlers in the Emulator now return a 500 error immediately. (#1755) +- Add support for ESM (ECMAScript Modules) alongside CommonJS. (#1750) +- Add `onMutationExecuted()` trigger for Firebase Data Connect. (#1727) diff --git a/README.md b/README.md index df3ad35af..0c2cc1b59 100644 --- a/README.md +++ b/README.md @@ -8,23 +8,29 @@ Cloud Functions is a hosted, private, and scalable Node.js environment where you Learn more about the Firebase SDK for Cloud Functions in the [Firebase documentation](https://firebase.google.com/docs/functions/) or [check out our samples](https://github.com/firebase/functions-samples). -## Migrating to v1 +Here are some resources to get help: -To migrate from a beta version of firebase-functions to v1, please refer to the [migration guide](https://firebase.google.com/docs/functions/beta-v1-diff). 
+- [Start with the quickstart](https://firebase.google.com/docs/functions/write-firebase-functions) +- [Go through the guides](https://firebase.google.com/docs/functions/) +- [Read the full API reference](https://firebase.google.com/docs/reference/functions/2nd-gen/node/firebase-functions) +- [Browse some examples](https://github.com/firebase/functions-samples) + +If the official documentation doesn't help, try asking through our [official support channels](https://firebase.google.com/support/) + +_Please avoid double posting across multiple channels!_ ## Usage ```js // functions/index.js -const functions = require('firebase-functions'); -const notifyUsers = require('./notify-users'); - -exports.newPost = functions.database - .ref('/posts/{postId}') - .onCreate((snapshot, context) => { - console.log('Received new post with ID:', context.params.postId); - return notifyUsers(snapshot.val()); - }); +const { onValueCreated } = require("firebase-functions/database"); +const logger = require("firebase-functions/logger"); +const notifyUsers = require("./notify-users"); + +exports.newPost = onValueCreated({ ref: "/posts/{postId}" }, (event) => { + logger.info("Received new post with ID:", event.params.postId); + return notifyUsers(event.data.val()); +}); ``` ## Contributing diff --git a/deploy_key.enc b/deploy_key.enc new file mode 100644 index 000000000..4451e042b Binary files /dev/null and b/deploy_key.enc differ diff --git a/docgen/api-extractor.base.json b/docgen/api-extractor.base.json new file mode 100644 index 000000000..869c825d6 --- /dev/null +++ b/docgen/api-extractor.base.json @@ -0,0 +1,364 @@ +/** + * Config file for API Extractor. For more info, please visit: https://api-extractor.com + */ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + + /** + * Optionally specifies another JSON config file that this file extends from. This provides a way for + * standard settings to be shared across multiple projects. + * + * If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains + * the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be + * resolved using NodeJS require(). + * + * SUPPORTED TOKENS: none + * DEFAULT VALUE: "" + */ + // "extends": "./shared/api-extractor-base.json" + // "extends": "my-package/include/api-extractor-base.json" + + /** + * Determines the "" token that can be used with other config file settings. The project folder + * typically contains the tsconfig.json and package.json config files, but the path is user-defined. + * + * The path is resolved relative to the folder of the config file that contains the setting. + * + * The default value for "projectFolder" is the token "", which means the folder is determined by traversing + * parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder + * that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error + * will be reported. + * + * SUPPORTED TOKENS: + * DEFAULT VALUE: "" + */ + "projectFolder": "..", + + /** + * (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor + * analyzes the symbols exported by this module. + * + * The file extension must be ".d.ts" and not ".ts". 
+ * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + */ + "mainEntryPointFilePath": "/lib/index.d.ts", + + /** + * A list of NPM package names whose exports should be treated as part of this package. + * + * For example, suppose that Webpack is used to generate a distributed bundle for the project "library1", + * and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part + * of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly + * imports library2. To avoid this, we can specify: + * + * "bundledPackages": [ "library2" ], + * + * This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been + * local files for library1. + */ + "bundledPackages": [], + + /** + * Determines how the TypeScript compiler engine will be invoked by API Extractor. + */ + "compiler": { + /** + * Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * Note: This setting will be ignored if "overrideTsconfig" is used. + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/tsconfig.json" + */ + "tsconfigFilePath": "/tsconfig.release.json" + /** + * Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk. + * The object must conform to the TypeScript tsconfig schema: + * + * http://json.schemastore.org/tsconfig + * + * If omitted, then the tsconfig.json file will be read from the "projectFolder". + * + * DEFAULT VALUE: no overrideTsconfig section + */ + // "overrideTsconfig": { + // . . . + // } + /** + * This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended + * and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when + * dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses + * for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck. + * + * DEFAULT VALUE: false + */ + // "skipLibCheck": true, + }, + + /** + * Configures how the API report file (*.api.md) will be generated. + */ + "apiReport": { + /** + * (REQUIRED) Whether to generate an API report. + */ + "enabled": true, + + /** + * The filename for the API report files. It will be combined with "reportFolder" or "reportTempFolder" to produce + * a full file path. + * + * The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/". + * + * SUPPORTED TOKENS: , + * DEFAULT VALUE: ".api.md" + */ + // "reportFileName": ".api.md", + + /** + * Specifies the folder where the API report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy, + * e.g. for an API review. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". 
+ * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/etc/" + */ + "reportFolder": "/docgen/etc/", + + /** + * Specifies the folder where the temporary report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * After the temporary file is written to disk, it is compared with the file in the "reportFolder". + * If they are different, a production build will fail. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/temp/" + */ + "reportTempFolder": "/docgen/temp/" + }, + + /** + * Configures how the doc model file (*.api.json) will be generated. + */ + "docModel": { + /** + * (REQUIRED) Whether to generate a doc model file. + */ + "enabled": true, + + /** + * The output path for the doc model file. The file extension should be ".api.json". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/temp/.api.json" + */ + "apiJsonFilePath": "/docgen/.api.json" + }, + + /** + * Configures how the .d.ts rollup file will be generated. + */ + "dtsRollup": { + /** + * (REQUIRED) Whether to generate the .d.ts rollup file. + */ + "enabled": true + + /** + * Specifies the output path for a .d.ts rollup file to be generated without any trimming. + * This file will include all declarations that are exported by the main entry point. + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/dist/.d.ts" + */ + // "untrimmedFilePath": "/dist/.d.ts", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release. + * This file will include only declarations that are marked as "@public" or "@beta". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "betaTrimmedFilePath": "/dist/-beta.d.ts", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release. + * This file will include only declarations that are marked as "@public". + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "publicTrimmedFilePath": "/dist/-public.d.ts", + + /** + * When a declaration is trimmed, by default it will be replaced by a code comment such as + * "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the + * declaration completely. + * + * DEFAULT VALUE: false + */ + // "omitTrimmingComments": true + }, + + /** + * Configures how the tsdoc-metadata.json file will be generated. + */ + "tsdocMetadata": { + /** + * Whether to generate the tsdoc-metadata.json file. + * + * DEFAULT VALUE: true + */ + // "enabled": true, + /** + * Specifies where the TSDoc metadata file should be written. 
+ * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * The default value is "", which causes the path to be automatically inferred from the "tsdocMetadata", + * "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup + * falls back to "tsdoc-metadata.json" in the package folder. + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "tsdocMetadataFilePath": "/dist/tsdoc-metadata.json" + }, + + /** + * Specifies what type of newlines API Extractor should use when writing output files. By default, the output files + * will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead. + * To use the OS's default newline kind, specify "os". + * + * DEFAULT VALUE: "crlf" + */ + // "newlineKind": "crlf", + + /** + * Configures how API Extractor reports error and warning messages produced during analysis. + * + * There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages. + */ + "messages": { + /** + * Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing + * the input .d.ts files. + * + * TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. + */ + "compilerMessageReporting": { + /** + * Configures the default routing for messages that don't match an explicit rule in this table. + */ + "default": { + /** + * Specifies whether the message should be written to the the tool's output log. Note that + * the "addToApiReportFile" property may supersede this option. + * + * Possible values: "error", "warning", "none" + * + * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build fail + * and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes + * the "--local" option), the warning is displayed but the build will not fail. + * + * DEFAULT VALUE: "warning" + */ + "logLevel": "warning" + + /** + * When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md), + * then the message will be written inside that file; otherwise, the message is instead logged according to + * the "logLevel" option. + * + * DEFAULT VALUE: false + */ + // "addToApiReportFile": false + } + + // "TS2551": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + }, + + /** + * Configures handling of messages reported by API Extractor during its analysis. + * + * API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag" + * + * DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings + */ + "extractorMessageReporting": { + "default": { + "logLevel": "warning" + // "addToApiReportFile": false + } + + // "ae-extra-release-tag": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + }, + + /** + * Configures handling of messages reported by the TSDoc parser when analyzing code comments. + * + * TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. 
+ */ + "tsdocMessageReporting": { + "default": { + "logLevel": "warning" + // "addToApiReportFile": false + } + + // "tsdoc-link-tag-unescaped-text": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + } + } +} diff --git a/docgen/api-extractor.v1.json b/docgen/api-extractor.v1.json new file mode 100644 index 000000000..880990ee1 --- /dev/null +++ b/docgen/api-extractor.v1.json @@ -0,0 +1,14 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "extends": "./api-extractor.base.json", + "mainEntryPointFilePath": "/lib/v1/index.d.ts", + "docModel": { + "enabled": true, + "apiJsonFilePath": "/docgen/v1/firebase-functions.api.json" + }, + "apiReport": { + "enabled": true, + "reportTempFolder": "/docgen/v1/temp", + "reportFolder": "/docgen/v1" + } +} diff --git a/docgen/api-extractor.v2.json b/docgen/api-extractor.v2.json new file mode 100644 index 000000000..2cf0596b7 --- /dev/null +++ b/docgen/api-extractor.v2.json @@ -0,0 +1,14 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "extends": "./api-extractor.base.json", + "mainEntryPointFilePath": "/lib/v2/index.d.ts", + "docModel": { + "enabled": true, + "apiJsonFilePath": "/docgen/v2/firebase-functions.api.json" + }, + "apiReport": { + "enabled": true, + "reportTempFolder": "/docgen/v2/temp", + "reportFolder": "/docgen/v2" + } +} diff --git a/docgen/content-sources/v1/toc.yaml b/docgen/content-sources/v1/toc.yaml new file mode 100644 index 000000000..59e380458 --- /dev/null +++ b/docgen/content-sources/v1/toc.yaml @@ -0,0 +1,176 @@ +toc: + - title: 'functions' + path: /docs/reference/functions/cloud_functions.html + section: + - title: 'CloudFunction' + path: /docs/reference/functions/cloud_functions.html#cloudfunction + - title: 'HttpsFunction' + path: /docs/reference/functions/cloud_functions.html#httpsfunction + - title: 'EventContext' + path: /docs/reference/functions/cloud_functions.eventcontext.html + - title: 'FunctionBuilder' + path: /docs/reference/functions/function_builder.functionbuilder.html + - title: 'Change' + path: /docs/reference/functions/cloud_functions.change.html + - title: 'ChangeJson' + path: /docs/reference/functions/cloud_functions.changejson.html + - title: 'BlockingFunction' + path: /docs/reference/functions/cloud_functions.blockingfunction.html + + - title: 'functions.config' + path: /docs/reference/functions/config.html + section: + - title: 'Config' + path: /docs/reference/functions/config.config-1.html + - title: 'config.Config' + path: /docs/reference/functions/config.config-1.config.html + + - title: 'functions.function-configuration' + path: /docs/reference/functions/function_configuration.html + section: + - title: 'config.DeploymentOptions' + path: /docs/reference/functions/function_configuration.deploymentoptions.html + - title: 'config.FailurePolicy' + path: /docs/reference/functions/function_configuration.failurepolicy.html + - title: 'config.RuntimeOptions' + path: /docs/reference/functions/function_configuration.runtimeoptions.html + - title: 'config.Schedule' + path: /docs/reference/functions/function_configuration.schedule.html + - title: 'config.ScheduleRetryConfig' + path: /docs/reference/functions/function_configuration.scheduleretryconfig.html + + - title: 'functions.analytics' + path: /docs/reference/functions/providers_analytics.html + section: + - title: 'AnalyticsEvent' + path: 
/docs/reference/functions/providers_analytics.analyticsevent.html + - title: 'AnalyticsEventBuilder' + path: /docs/reference/functions/providers_analytics.analyticseventbuilder.html + - title: 'AppInfo' + path: /docs/reference/functions/providers_analytics.appinfo.html + - title: 'DeviceInfo' + path: /docs/reference/functions/providers_analytics.deviceinfo.html + - title: 'ExportBundleInfo' + path: /docs/reference/functions/providers_analytics.exportbundleinfo.html + - title: 'GeoInfo' + path: /docs/reference/functions/providers_analytics.geoinfo.html + - title: 'UserDimensions' + path: /docs/reference/functions/providers_analytics.userdimensions.html + - title: 'UserPropertyValue' + path: /docs/reference/functions/providers_analytics.userpropertyvalue.html + + - title: 'functions.auth' + path: /docs/reference/functions/providers_auth.html + section: + - title: 'UserBuilder' + path: /docs/reference/functions/providers_auth.userbuilder.html + - title: 'UserInfo' + path: /docs/reference/functions/providers_auth.html#userinfo + - title: 'UserRecordMetadata' + path: /docs/reference/functions/providers_auth.userrecordmetadata.html + - title: 'UserRecord' + path: /docs/reference/functions/providers_auth.html#userrecord + + - title: 'functions.firestore' + path: /docs/reference/functions/providers_firestore.html + section: + - title: 'DocumentBuilder' + path: /docs/reference/functions/providers_firestore.documentbuilder.html + - title: 'DocumentSnapshot' + path: /docs/reference/functions/providers_firestore.html#documentsnapshot + + - title: 'functions.database' + path: /docs/reference/functions/providers_database.html + section: + - title: 'DataSnapshot' + path: /docs/reference/functions/providers_database.datasnapshot.html + - title: 'RefBuilder' + path: /docs/reference/functions/providers_database.refbuilder.html + - title: 'InstanceBuilder' + path: /docs/reference/functions/providers_database.instancebuilder.html + + - title: 'functions.https' + path: /docs/reference/functions/providers_https.html + section: + - title: 'HttpsError' + path: /docs/reference/functions/common_providers_https.httpserror.html + - title: 'CallableRequest' + path: /docs/reference/functions/common_providers_https.callablerequest.html + - title: 'CallableContext' + path: /docs/reference/functions/common_providers_https.callablecontext.html + - title: 'AuthData' + path: /docs/reference/functions/common_providers_https.authdata.html + - title: 'AppCheckData' + path: /docs/reference/functions/common_providers_https.appcheckdata.html + + - title: 'functions.logger' + path: /docs/reference/functions/logger.html + section: + - title: 'LogEntry' + path: /docs/reference/functions/logger.logentry.html + + - title: 'functions.pubsub' + path: /docs/reference/functions/providers_pubsub.html + section: + - title: 'Message' + path: /docs/reference/functions/providers_pubsub.message.html + - title: 'TopicBuilder' + path: /docs/reference/functions/providers_pubsub.topicbuilder.html + - title: 'ScheduleBuilder' + path: /docs/reference/functions/providers_pubsub.schedulebuilder.html + + - title: 'functions.remoteconfig' + path: /docs/reference/functions/providers_remoteconfig.html + section: + - title: 'RemoteConfigUser' + path: /docs/reference/functions/providers_remoteconfig.remoteconfiguser.html + - title: 'TemplateVersion' + path: /docs/reference/functions/providers_remoteconfig.templateversion.html + - title: 'UpdateBuilder' + path: /docs/reference/functions/providers_remoteconfig.updatebuilder.html + + - title: 'functions.storage' 
+ path: /docs/reference/functions/providers_storage.html + section: + - title: 'BucketBuilder' + path: /docs/reference/functions/providers_storage.bucketbuilder.html + - title: 'ObjectBuilder' + path: /docs/reference/functions/providers_storage.objectbuilder.html + - title: 'ObjectMetadata' + path: /docs/reference/functions/providers_storage.objectmetadata.html + + - title: 'functions.tasks' + path: /docs/reference/functions/providers_tasks.html + section: + - title: AuthData + path: /docs/reference/functions/common_providers_tasks.authdata.html + - title: RateLimits + path: /docs/reference/functions/common_providers_tasks.ratelimits.html + - title: RetryConfig + path: /docs/reference/functions/common_providers_tasks.retryconfig.html + - title: TaskContext + path: /docs/reference/functions/common_providers_tasks.taskcontext.html + - title: TaskQueueBuilder + path: /docs/reference/functions/providers_tasks.taskqueuebuilder.html + - title: TaskQueueFunction + path: /docs/reference/functions/providers_tasks.taskqueuefunction.html + - title: TaskQueueOptions + path: /docs/reference/functions/providers_tasks.taskqueueoptions.html + + - title: 'functions.testLab' + path: /docs/reference/functions/providers_testlab.html + section: + - title: 'testLab.clientInfo' + path: /docs/reference/functions/providers_testlab.clientinfo.html + - title: 'testLab.resultStorage' + path: /docs/reference/functions/providers_testlab.resultstorage.html + - title: 'testLab.testMatrix' + path: /docs/reference/functions/providers_testlab.testmatrix.html + - title: 'testLab.testMatrixBuilder' + path: /docs/reference/functions/providers_testlab.testmatrixbuilder.html + + - title: 'functions.handler' + path: /docs/reference/functions/handler_builder.html + section: + - title: 'HandlerBuilder' + path: /docs/reference/functions/handler_builder.handlerbuilder.html diff --git a/docgen/content-sources/v2/toc.yaml b/docgen/content-sources/v2/toc.yaml new file mode 100644 index 000000000..61e330865 --- /dev/null +++ b/docgen/content-sources/v2/toc.yaml @@ -0,0 +1,44 @@ +toc: + - title: 'functions' + path: /docs/functions/alpha/index.html + - title: 'functions.core' + path: /docs/functions/alpha/v2_core.html + section: + - title: 'Functions v2' + path: /docs/functions/alpha/v2.html + - title: 'functions.CloudEvent' + path: /docs/functions/alpha/v2_core.CloudEvent.html + - title: 'functions.CloudFunction' + path: /docs/functions/alpha/v2_core.CloudFunction.html + - title: 'functions.https' + path: /docs/functions/alpha/v2_providers_https.html + section: + - title: 'functions.https.CallableFunction' + path: /docs/functions/alpha/v2_providers_https.CallableFunction.html + - title: 'functions.https.CallableRequest' + path: /docs/functions/alpha/v2_providers_https.CallableRequest.html + - title: 'functions.https.error' + path: /docs/functions/alpha/v2_providers_https.HttpsError.html + - title: 'functions.https.options' + path: /docs/functions/alpha/v2_providers_https.HttpsOptions.html + - title: 'functions.logger' + path: /docs/functions/alpha/logger.html + section: + - title: 'LogEntry' + path: /docs/functions/alpha/logger.LogEntry.html + - title: 'functions.options' + path: /docs/functions/alpha/v2_options.html + section: + - title: 'functions.options.GlobalOptions' + path: /docs/functions/alpha/v2_options.GlobalOptions.html + - title: 'functions.options.EventHandlerOptions' + path: /docs/functions/alpha/v2_options.EventHandlerOptions.html + - title: 'functions.pubsub' + path: /docs/functions/alpha/v2_providers_pubsub.html + 
section: + - title: 'Message' + path: /docs/functions/alpha/v2_providers_pubsub.Message.html + - title: 'MessagePublishedData' + path: /docs/functions/alpha/v2_providers_pubsub.MessagePublishedData.html + - title: 'PubSubOptions' + path: /docs/functions/alpha/v2_providers_pubsub.PubSubOptions.html diff --git a/docgen/theme/helpers/cleanBreadcrumb.js b/docgen/theme/helpers/cleanBreadcrumb.js new file mode 100644 index 000000000..ad52e64a7 --- /dev/null +++ b/docgen/theme/helpers/cleanBreadcrumb.js @@ -0,0 +1,4 @@ +exports.cleanBreadcrumb = function (value) { + const parts = value.replace(/"/g, '').split('/'); + return parts[parts.length - 1]; +}; diff --git a/docgen/toc.ts b/docgen/toc.ts new file mode 100644 index 000000000..0b5d862b2 --- /dev/null +++ b/docgen/toc.ts @@ -0,0 +1,194 @@ +/** + * Forked of https://github.com/firebase/firebase-js-sdk/blob/5ce06766303b92fea969c58172a7c1ab8695e21e/repo-scripts/api-documenter/src/toc.ts. + * + * Firebase Functions SDK uses namespaces as primary entry points but the theoriginal Firebase api-documenter ignores + * them when generating toc.yaml. A small modification is made to include namespaces and exclude classes when walking + * down the api model. + */ +import * as yaml from 'js-yaml'; +import {ApiItem, ApiItemKind, ApiModel, ApiPackage, ApiParameterListMixin,} from 'api-extractor-model-me'; +import {ModuleSource} from '@microsoft/tsdoc/lib-commonjs/beta/DeclarationReference'; +import {FileSystem, PackageName} from '@rushstack/node-core-library'; +import yargs from 'yargs'; +import {writeFileSync} from 'fs'; +import {join, resolve} from 'path'; + +function getSafeFileName(f: string): string { + return f.replace(/[^a-z0-9_\-\.]/gi, '_').toLowerCase(); +} + +export function getFilenameForApiItem( + apiItem: ApiItem, + addFileNameSuffix: boolean +): string { + if (apiItem.kind === ApiItemKind.Model) { + return 'index.md'; + } + + let baseName: string = ''; + let multipleEntryPoints: boolean = false; + for (const hierarchyItem of apiItem.getHierarchy()) { + // For overloaded methods, add a suffix such as "MyClass.myMethod_2". + let qualifiedName = getSafeFileName(hierarchyItem.displayName); + if (ApiParameterListMixin.isBaseClassOf(hierarchyItem)) { + if (hierarchyItem.overloadIndex > 1) { + // Subtract one for compatibility with earlier releases of API Documenter. + // (This will get revamped when we fix GitHub issue #1308) + qualifiedName += `_${hierarchyItem.overloadIndex - 1}`; + } + } + + switch (hierarchyItem.kind) { + case ApiItemKind.Model: + break; + case ApiItemKind.EntryPoint: + const packageName: string = hierarchyItem.parent!.displayName; + let entryPointName: string = PackageName.getUnscopedName(packageName); + if (multipleEntryPoints) { + entryPointName = `${PackageName.getUnscopedName(packageName)}/${ + hierarchyItem.displayName + }`; + } + baseName = getSafeFileName(entryPointName); + break; + case ApiItemKind.Package: + baseName = getSafeFileName( + PackageName.getUnscopedName(hierarchyItem.displayName) + ); + if ((hierarchyItem as ApiPackage).entryPoints.length > 1) { + multipleEntryPoints = true; + } + break; + case ApiItemKind.Namespace: + baseName += '.' + qualifiedName; + if (addFileNameSuffix) { + baseName += '_n'; + } + break; + case ApiItemKind.Class: + case ApiItemKind.Interface: + baseName += '.' 
+ qualifiedName; + break; + } + } + return baseName + '.md'; +} + +export interface ITocGenerationOptions { + inputFolder: string; + g3Path: string; + outputFolder: string; + addFileNameSuffix: boolean; +} + +interface ITocItem { + title: string; + path: string; + section?: ITocItem[]; +} + +export function generateToc({ + inputFolder, + g3Path, + outputFolder, + addFileNameSuffix, +}: ITocGenerationOptions) { + const apiModel: ApiModel = new ApiModel(); + + for (const filename of FileSystem.readFolder(inputFolder)) { + if (filename.match(/\.api\.json$/i)) { + const filenamePath = join(inputFolder, filename); + apiModel.loadPackage(filenamePath); + } + } + + // Firebase Functions only have 1 entry point. Let's traverse the tree to find it. + const apiItems: ApiItem[] = []; + let cursor = apiModel as ApiItem; + while (cursor?.kind !== ApiItemKind.EntryPoint) { + apiItems.push(...cursor.members); + cursor = apiItems.pop(); + } + if (!cursor) { + throw new Error("Couldn't find entry point from api model. Are you sure you've generated the api model?") + } + + const entryPointName = ( + cursor.canonicalReference.source! as ModuleSource + ).escapedPath.replace('@firebase/', ''); + + const entryPointToc: ITocItem = { + title: entryPointName, + path: `${g3Path}/${getFilenameForApiItem(cursor, addFileNameSuffix)}`, + section: [], + }; + + generateTocRecursively(cursor, g3Path, addFileNameSuffix, entryPointToc); + + writeFileSync( + resolve(outputFolder, 'toc.yaml'), + yaml.dump( + { toc: entryPointToc }, + { + quotingType: '"', + } + ) + ); +} + +function generateTocRecursively( + apiItem: ApiItem, + g3Path: string, + addFileNameSuffix: boolean, + toc: ITocItem +) { + for (const member of apiItem.members) { + // only namespaces/classes gets included in ToC. + if ( + [ + ApiItemKind.Class, + ApiItemKind.Namespace, + ApiItemKind.Interface, + ].includes(member.kind) + ) { + const fileName = getFilenameForApiItem(member, addFileNameSuffix); + const title = + member.displayName[0].toUpperCase() + member.displayName.slice(1); + const section: ITocItem = { + title, + path: `${g3Path}/${fileName}`, + } + if (!toc.section) { + toc.section = []; + } + toc.section.push(section); + generateTocRecursively(member, g3Path, addFileNameSuffix, section); + } + } +} + +const { input, output, path } = yargs(process.argv.slice(2)) + .option('input', { + alias: 'i', + describe: 'input folder containing the *.api.json files to be processed.', + default: './input', + }) + .option('output', { + alias: 'o', + describe: 'destination for the generated toc content.', + default: './toc', + }) + .option('path', { + alias: 'p', + describe: 'specifies the path where the reference docs resides (e.g. 
g3)', + default: '/', + }) + .help().argv; + +FileSystem.ensureFolder(output); +generateToc({ + inputFolder: input, + g3Path: path, + outputFolder: output, + addFileNameSuffix: false, +}); diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 000000000..2b77805fd --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,112 @@ +const { FlatCompat } = require("@eslint/eslintrc"); +const js = require("@eslint/js"); +const path = require("path"); + +const compat = new FlatCompat({ + baseDirectory: __dirname, + recommendedConfig: js.configs.recommended, + allConfig: js.configs.all +}); + +module.exports = [ + { + ignores: [ + "lib/", + "dev/", + "node_modules/", + "coverage/", + "docgen/", + "v1/", + "v2/", + "logger/", + "dist/", + "spec/fixtures/", + "scripts/**/*.js", + "scripts/**/*.mjs", + "protos/", + ".prettierrc.js", + "eslint.config.*", + "tsdown.config.*", + "scripts/bin-test/sources/esm-ext/index.mjs", + ], + }, + ...compat.extends( + "eslint:recommended", + "plugin:@typescript-eslint/recommended", + "plugin:@typescript-eslint/recommended-requiring-type-checking", + "plugin:jsdoc/recommended", + "google", + "prettier" + ), + { + languageOptions: { + parser: require("@typescript-eslint/parser"), + parserOptions: { + project: "tsconfig.json", + tsconfigRootDir: __dirname, + }, + ecmaVersion: 2022 + }, + plugins: { + "prettier": require("eslint-plugin-prettier"), + }, + rules: { + "jsdoc/newline-after-description": "off", + "jsdoc/require-jsdoc": ["warn", { publicOnly: true }], + "jsdoc/check-tag-names": ["warn", { definedTags: ["alpha", "remarks", "typeParam", "packageDocumentation", "hidden"] }], + "no-restricted-globals": ["error", "name", "length"], + "prefer-arrow-callback": "error", + "prettier/prettier": "error", + "require-atomic-updates": "off", // This rule is so noisy and isn't useful: https://github.com/eslint/eslint/issues/11899 + "require-jsdoc": "off", // This rule is deprecated and superseded by jsdoc/require-jsdoc. + "valid-jsdoc": "off", // This is deprecated but included in recommended configs. + "no-prototype-builtins": "warn", + "no-useless-escape": "warn", + "prefer-promise-reject-errors": "warn", + }, + }, + { + files: ["**/*.ts"], + rules: { + "jsdoc/require-param-type": "off", + "jsdoc/require-returns-type": "off", + // Google style guide allows us to omit trivial parameters and returns + "jsdoc/require-param": "off", + "jsdoc/require-returns": "off", + + "@typescript-eslint/no-invalid-this": "error", + "@typescript-eslint/no-unused-vars": ["error", { argsIgnorePattern: "^_", caughtErrorsIgnorePattern: "^_" }], // Unused vars should not exist. + "@typescript-eslint/no-misused-promises": "warn", // rule does not work with async handlers for express. + "no-invalid-this": "off", // Turned off in favor of @typescript-eslint/no-invalid-this. + "no-unused-vars": "off", // Off in favor of @typescript-eslint/no-unused-vars. + eqeqeq: ["error", "always", { null: "ignore" }], + camelcase: ["error", { properties: "never" }], // snake_case allowed in properties iif to satisfy an external contract / style + + // Ideally, all these warning should be error - let's fix them in the future. 
+ "@typescript-eslint/no-unsafe-argument": "warn", + "@typescript-eslint/no-unsafe-assignment": "warn", + "@typescript-eslint/no-unsafe-call": "warn", + "@typescript-eslint/no-unsafe-member-access": "warn", + "@typescript-eslint/no-unsafe-return": "warn", + "@typescript-eslint/restrict-template-expressions": "warn", + "@typescript-eslint/no-explicit-any": "warn", + "@typescript-eslint/no-redundant-type-constituents": "warn", + "@typescript-eslint/no-base-to-string": "warn", + "@typescript-eslint/no-duplicate-type-constituents": "warn", + "@typescript-eslint/no-require-imports": "warn", + "@typescript-eslint/no-empty-object-type": "warn", + "@typescript-eslint/prefer-promise-reject-errors": "warn", + }, + }, + { + files: ["**/*.spec.ts", "**/*.spec.js", "spec/helper.ts", "scripts/bin-test/**/*.ts", "integration_test/**/*.ts"], + languageOptions: { + globals: { + mocha: true, + }, + }, + rules: { + "@typescript-eslint/no-unused-expressions": "off", + } + }, +]; diff --git a/integration_test/README.md b/integration_test/README.md index f972e26cd..3b0f5413f 100644 --- a/integration_test/README.md +++ b/integration_test/README.md @@ -1,12 +1,22 @@ -How to Use ---------- +## How to Use -***ATTENTION***: Running this test will wipe the contents of the Firebase project you run it against. Make sure you use a disposable Firebase project! +**_ATTENTION_**: Running this test will wipe the contents of the Firebase project(s) you run it against. Make sure you use disposable Firebase project(s)! Run the integration test as follows: ```bash -./run_tests.sh +./run_tests.sh [] ``` -The tests run fully automatically, and will print the result on standard out. The integration test for HTTPS is that it properly kicks off other integration tests and returns a result. From there the other integration test suites will write their results back to the database, where you can check the detailed results if you'd like. +Test runs cycles of testing, once for Node.js 14 and another for Node.js 16. + +Test uses locally installed firebase to invoke commands for deploying function. The test also requires that you have +gcloud CLI installed and authenticated (`gcloud auth login`). + +Integration test is triggered by invoking HTTP function integrationTest which in turns invokes each function trigger +by issuing actions necessary to trigger it (e.g. write to storage bucket). + +### Debugging + +The status and result of each test is stored in RTDB of the project used for testing. You can also inspect Cloud Logging +for more clues. 
diff --git a/integration_test/firebase.json b/integration_test/firebase.json index ce496e265..9662aef03 100644 --- a/integration_test/firebase.json +++ b/integration_test/firebase.json @@ -5,5 +5,10 @@ "firestore": { "rules": "firestore.rules", "indexes": "firestore.indexes.json" + }, + "functions": { + "source": "functions", + "codebase": "integration-tests", + "predeploy": ["npm --prefix \"$RESOURCE_DIR\" run build"] } } diff --git a/integration_test/firestore.rules b/integration_test/firestore.rules index e8f8d7997..d9df6d5d1 100644 --- a/integration_test/firestore.rules +++ b/integration_test/firestore.rules @@ -1,3 +1,5 @@ +rules_version = "2"; + service cloud.firestore { match /databases/{database}/documents { match /{document=**} { diff --git a/integration_test/functions/src/auth-tests.ts b/integration_test/functions/src/auth-tests.ts deleted file mode 100644 index b3e5fe745..000000000 --- a/integration_test/functions/src/auth-tests.ts +++ /dev/null @@ -1,52 +0,0 @@ -import * as functions from 'firebase-functions'; -import { TestSuite, expectEq } from './testing'; -import * as admin from 'firebase-admin'; -import UserMetadata = admin.auth.UserRecord; - -export const createUserTests: any = functions.auth.user().onCreate((u, c) => { - let testId: string = u.displayName; - console.log(`testId is ${testId}`); - - return new TestSuite('auth user onCreate') - .it('should have a project as resource', (user, context) => expectEq( - context.resource.name, `projects/${process.env.GCLOUD_PROJECT}`)) - - .it('should not have a path', (user, context) => expectEq((context as any).path, undefined)) - - .it('should have the correct eventType', (user, context) => expectEq( - context.eventType, 'google.firebase.auth.user.create')) - - .it('should have an eventId', (user, context)=> context.eventId) - - .it('should have a timestamp', (user, context) => context.timestamp) - - .it('should not have auth', (user, context) => expectEq((context as any).auth, undefined)) - - .it('should not have action', (user, context) => expectEq((context as any).action, undefined)) - - .run(testId, u, c); -}); - -export const deleteUserTests: any = functions.auth.user().onDelete((u, c) => { - let testId: string = u.displayName; - console.log(`testId is ${testId}`); - - return new TestSuite('auth user onDelete') - .it('should have a project as resource', (user, context) => expectEq( - context.resource.name, `projects/${process.env.GCLOUD_PROJECT}`)) - - .it('should not have a path', (user, context) => expectEq((context as any).path, undefined)) - - .it('should have the correct eventType', (user, context) => expectEq( - context.eventType, 'google.firebase.auth.user.delete')) - - .it('should have an eventId', (user, context) => context.eventId) - - .it('should have a timestamp', (user, context) => context.timestamp) - - .it('should not have auth', (user, context) => expectEq((context as any).auth, undefined)) - - .it('should not have action', (user, context) => expectEq((context as any).action, undefined)) - - .run(testId, u, c); -}); diff --git a/integration_test/functions/src/database-tests.ts b/integration_test/functions/src/database-tests.ts deleted file mode 100644 index 22d0a6b07..000000000 --- a/integration_test/functions/src/database-tests.ts +++ /dev/null @@ -1,48 +0,0 @@ -import * as functions from 'firebase-functions'; -import { TestSuite, expectEq, expectMatches } from './testing'; -import * as admin from 'firebase-admin'; -import DataSnapshot = admin.database.DataSnapshot; - -const testIdFieldName = 'testId'; - 
-export const databaseTests: any = functions.database.ref('dbTests/{testId}/start').onWrite((ch, ctx) => { - if (ch.after.val() === null) { - console.log( - 'Event for ' + ctx.params[testIdFieldName] - + ' is null; presuming data cleanup, so skipping.'); - return; - } - - return new TestSuite>('database ref onWrite') - - .it('should not have event.app', (change, context) => !(context as any).app) - - .it('should give refs access to admin data', (change) => - change.after.ref.parent.child('adminOnly').update({ allowed: 1 }).then(() => true)) - - .it('should have a correct ref url', (change) => { - const url = change.after.ref.toString(); - return Promise.resolve().then(() => { - return expectMatches(url, new RegExp(`^https://${process.env.GCLOUD_PROJECT}.firebaseio.com/dbTests`)); - }).then(() => { - return expectMatches(url, /\/start$/); - }); - }) - - .it('should have refs resources', (change, context) => expectEq( - context.resource.name, - `projects/_/instances/${process.env.GCLOUD_PROJECT}/refs/dbTests/${context.params.testId}/start`)) - - .it('should not include path', (change, context) => expectEq((context as any).path, undefined)) - - .it('should have the right eventType', (change, context) => expectEq( - context.eventType, 'google.firebase.database.ref.write')) - - .it('should have eventId', (change, context) => context.eventId) - - .it('should have timestamp', (change, context) => context.timestamp) - - .it('should not have action', (change, context) => expectEq((context as any).action, undefined)) - - .run(ctx.params[testIdFieldName], ch, ctx); -}); diff --git a/integration_test/functions/src/firestore-tests.ts b/integration_test/functions/src/firestore-tests.ts deleted file mode 100644 index 545fd61fd..000000000 --- a/integration_test/functions/src/firestore-tests.ts +++ /dev/null @@ -1,31 +0,0 @@ -import * as functions from 'firebase-functions'; -import { TestSuite, expectEq, expectDeepEq } from './testing'; -import * as admin from 'firebase-admin'; -import DocumentSnapshot = admin.firestore.DocumentSnapshot; - -const testIdFieldName = 'documentId'; - -export const firestoreTests: any = functions.firestore.document('tests/{documentId}').onCreate((s, c) => { - return new TestSuite('firestore document onWrite') - - .it('should not have event.app', (snap, context) => !(context as any).app) - - .it('should give refs write access', (snap) => - snap.ref.set({ allowed: 1 }, {merge: true}).then(() => true)) - - .it('should have well-formatted resource', (snap, context) => expectEq( - context.resource.name, - `projects/${process.env.GCLOUD_PROJECT}/databases/(default)/documents/tests/${context.params.documentId}`) - ) - - .it('should have the right eventType', (snap, context) => expectEq( - context.eventType, 'google.firestore.document.create')) - - .it('should have eventId', (snap, context) => context.eventId) - - .it('should have timestamp', (snap, context) => context.timestamp) - - .it('should have the correct data', (snap, context) => expectDeepEq(snap.data(), {test: context.params.documentId})) - - .run(c.params[testIdFieldName], s, c); -}); diff --git a/integration_test/functions/src/https-tests.ts b/integration_test/functions/src/https-tests.ts deleted file mode 100644 index 4595214e8..000000000 --- a/integration_test/functions/src/https-tests.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as functions from 'firebase-functions'; -import * as _ from 'lodash'; -import { TestSuite, expectEq } from './testing'; - -export const callableTests: any = functions.https.onCall(d => { - return 
new TestSuite('https onCall') - .it('should have the correct data', data => expectEq(_.get(data, 'foo'), 'bar')) - .run(d.testId, d); -}); diff --git a/integration_test/functions/src/index.ts b/integration_test/functions/src/index.ts index 58c3d4b31..79449cc7b 100644 --- a/integration_test/functions/src/index.ts +++ b/integration_test/functions/src/index.ts @@ -1,100 +1,230 @@ -import * as functions from 'firebase-functions'; -import * as firebase from 'firebase'; -import * as https from 'https'; -import * as admin from 'firebase-admin'; -import { Request, Response } from 'express'; +import { PubSub } from "@google-cloud/pubsub"; +import { GoogleAuth } from "google-auth-library"; +import { Request, Response } from "express"; +import * as admin from "firebase-admin"; +import * as functions from "firebase-functions"; +import fs from "fs"; +import fetch from "node-fetch"; -export * from './pubsub-tests'; -export * from './database-tests'; -export * from './auth-tests'; -export * from './firestore-tests'; -export * from './https-tests'; -const numTests = Object.keys(exports).length; // Assumption: every exported function is its own test. +import * as v1 from "./v1"; +import * as v2 from "./v2"; +const getNumTests = (m: object): number => { + return Object.keys(m).filter((k) => ({}.hasOwnProperty.call(m[k], "__endpoint"))).length; +}; +const numTests = getNumTests(v1) + getNumTests(v2); +export { v1, v2 }; + +import { REGION } from "./region"; +import * as testLab from "./v1/testLab-utils"; -import 'firebase-functions'; // temporary shim until process.env.FIREBASE_CONFIG available natively in GCF(BUG 63586213) const firebaseConfig = JSON.parse(process.env.FIREBASE_CONFIG); -firebase.initializeApp(firebaseConfig); -console.log('initializing admin'); admin.initializeApp(); -// TODO(klimt): Get rid of this once the JS client SDK supports callable triggers. -function callHttpsTrigger(name: string, data: any) { - return new Promise((resolve, reject) => { - const request = https.request({ - method: 'POST', - host: 'us-central1-' + firebaseConfig.projectId + '.cloudfunctions.net', - path: '/' + name, +// Re-enable no-unused-var check once callable functions are testable again. +// eslint-disable-next-line @typescript-eslint/no-unused-vars +async function callHttpsTrigger(name: string, data: any) { + const url = `https://${REGION}-${firebaseConfig.projectId}.cloudfunctions.net/${name}`; + const client = await new GoogleAuth().getIdTokenClient("32555940559.apps.googleusercontent.com"); + const resp = await client.request({ + url, + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ data }), + }); + if (resp.status > 200) { + throw Error(resp.statusText); + } +} + +// Re-enable no-unused-var check once callable functions are testable again. 
+// eslint-disable-next-line @typescript-eslint/no-unused-vars +async function callV2HttpsTrigger(name: string, data: any, accessToken: string) { + const getFnResp = await fetch( + `https://cloudfunctions.googleapis.com/v2beta/projects/${firebaseConfig.projectId}/locations/${REGION}/functions/${name}`, + { headers: { - 'Content-Type': 'application/json', + Authorization: `Bearer ${accessToken}`, }, - }, (response) => { - let body = ''; - response.on('data', (chunk) => { body += chunk; }); - response.on('end', () => resolve(body)); - }); - request.on('error', reject); - request.write(JSON.stringify({data})); - request.end(); + } + ); + if (!getFnResp.ok) { + throw new Error(getFnResp.statusText); + } + const fn = await getFnResp.json(); + const uri = fn.serviceConfig?.uri; + if (!uri) { + throw new Error(`Cannot call v2 https trigger ${name} - no uri found`); + } + + const client = await new GoogleAuth().getIdTokenClient("32555940559.apps.googleusercontent.com"); + const invokeFnREsp = await client.request({ + url: uri, + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ data }), }); + if (invokeFnREsp.status > 200) { + throw Error(invokeFnREsp.statusText); + } +} + +async function callScheduleTrigger(functionName: string, region: string, accessToken: string) { + const response = await fetch( + `https://cloudscheduler.googleapis.com/v1/projects/${firebaseConfig.projectId}/locations/us-central1/jobs/firebase-schedule-${functionName}-${region}:run`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${accessToken}`, + }, + } + ); + if (!response.ok) { + throw new Error(`Failed request with status ${response.status}!`); + } + const data = await response.text(); + functions.logger.log(`Successfully scheduled function ${functionName}`, data); + return; +} + +async function callV2ScheduleTrigger(functionName: string, region: string, accessToken: string) { + const response = await fetch( + `https://cloudscheduler.googleapis.com/v1/projects/${firebaseConfig.projectId}/locations/us-central1/jobs/firebase-schedule-${functionName}-${region}:run`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${accessToken}`, + }, + } + ); + if (!response.ok) { + throw new Error(`Failed request with status ${response.status}!`); + } + const data = await response.text(); + functions.logger.log(`Successfully scheduled v2 function ${functionName}`, data); + return; } -export const integrationTests: any = functions.https.onRequest((req: Request, resp: Response) => { - let pubsub: any = require('@google-cloud/pubsub')(); +async function updateRemoteConfig(testId: string, accessToken: string): Promise { + const resp = await fetch( + `https://firebaseremoteconfig.googleapis.com/v1/projects/${firebaseConfig.projectId}/remoteConfig`, + { + method: "PUT", + headers: { + Authorization: `Bearer ${accessToken}`, + "Content-Type": "application/json; UTF-8", + "Accept-Encoding": "gzip", + "If-Match": "*", + }, + body: JSON.stringify({ version: { description: testId } }), + } + ); + if (!resp.ok) { + throw new Error(resp.statusText); + } +} - const testId = firebase.database().ref().push().key; - return Promise.all([ +function v1Tests(testId: string, accessToken: string): Array> { + return [ // A database write to trigger the Firebase Realtime Database tests. 
- // The database write happens without admin privileges, so that the triggered function's "event.data.ref" also - // doesn't have admin privileges. - firebase.database().ref(`dbTests/${testId}/start`).set({ '.sv': 'timestamp' }), + admin.database().ref(`dbTests/${testId}/start`).set({ ".sv": "timestamp" }), // A Pub/Sub publish to trigger the Cloud Pub/Sub tests. - pubsub.topic('pubsubTests').publish({ testId }), + new PubSub().topic("pubsubTests").publish(Buffer.from(JSON.stringify({ testId }))), // A user creation to trigger the Firebase Auth user creation tests. - admin.auth().createUser({ - email: `${testId}@fake.com`, - password: 'secret', - displayName: `${testId}`, - }).then(userRecord => { - // A user deletion to trigger the Firebase Auth user deletion tests. - admin.auth().deleteUser(userRecord.uid); - }), + admin + .auth() + .createUser({ + email: `${testId}@fake.com`, + password: "secret", + displayName: `${testId}`, + }) + .then(async (userRecord) => { + // A user deletion to trigger the Firebase Auth user deletion tests. + await admin.auth().deleteUser(userRecord.uid); + }), // A firestore write to trigger the Cloud Firestore tests. - admin.firestore().collection('tests').doc(testId).set({test: testId}), + admin.firestore().collection("tests").doc(testId).set({ test: testId }), // Invoke a callable HTTPS trigger. - callHttpsTrigger('callableTests', {foo: 'bar', testId}), + // TODO: Temporarily disable - doesn't work unless running on projects w/ permission to create public functions. + // callHttpsTrigger("v1-callableTests", { foo: "bar", testId }), + // A Remote Config update to trigger the Remote Config tests. + updateRemoteConfig(testId, accessToken), + // A storage upload to trigger the Storage tests + admin + .storage() + .bucket() + .upload("/tmp/" + testId + ".txt"), + testLab.startTestRun(firebaseConfig.projectId, testId, accessToken), + // Invoke the schedule for our scheduled function to fire + callScheduleTrigger("v1-schedule", "us-central1", accessToken), + ]; +} + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function v2Tests(testId: string, accessToken: string): Array> { + return [ + // Invoke a callable HTTPS trigger. + // TODO: Temporarily disable - doesn't work unless running on projects w/ permission to create public functions. + // callV2HttpsTrigger("v2-callabletests", { foo: "bar", testId }, accessToken), + // Invoke a scheduled trigger. + callV2ScheduleTrigger("v2-schedule", "us-central1", accessToken), + ]; +} - ]).then(() => { - // On test completion, check that all tests pass and reply "PASS", or provide further details. - console.log('Waiting for all tests to report they pass...'); - let ref = admin.database().ref(`testRuns/${testId}`); - return new Promise((resolve, reject) => { - let testsExecuted = 0; - ref.on('child_added', (snapshot) => { - testsExecuted += 1; - if (!snapshot.val().passed) { - reject(new Error(`test ${snapshot.key} failed; see database for details.`)); - return; - } - console.log(`${snapshot.key} passed (${testsExecuted} of ${numTests})`); - if (testsExecuted < numTests) { - // Not all tests have completed. Wait longer. - return; - } - // All tests have passed! 
- resolve(); +export const integrationTests: any = functions + .region(REGION) + .runWith({ + timeoutSeconds: 540, + invoker: "private", + }) + .https.onRequest(async (req: Request, resp: Response) => { + const testId = admin.database().ref().push().key; + await admin.database().ref(`testRuns/${testId}/timestamp`).set(Date.now()); + const testIdRef = admin.database().ref(`testRuns/${testId}`); + functions.logger.info("testId is: ", testId); + fs.writeFile(`/tmp/${testId}.txt`, "test", () => undefined); + try { + const accessToken = await admin.credential.applicationDefault().getAccessToken(); + await Promise.all([ + ...v1Tests(testId, accessToken.access_token), + ...v2Tests(testId, accessToken.access_token), + ]); + // On test completion, check that all tests pass and reply "PASS", or provide further details. + functions.logger.info("Waiting for all tests to report they pass..."); + await new Promise((resolve, reject) => { + setTimeout(() => reject(new Error("Timeout")), 5 * 60 * 1000); + let testsExecuted = 0; + testIdRef.on("child_added", (snapshot) => { + if (snapshot.key === "timestamp") { + return; + } + testsExecuted += 1; + if (!snapshot.val().passed) { + reject(new Error(`test ${snapshot.key} failed; see database for details.`)); + return; + } + functions.logger.info(`${snapshot.key} passed (${testsExecuted} of ${numTests})`); + if (testsExecuted < numTests) { + // Not all tests have completed. Wait longer. + return; + } + // All tests have passed! + resolve(); + }); }); - }).then(() => { - ref.off(); // No more need to listen. - return Promise.resolve(); - }).catch(err => { - ref.off(); // No more need to listen. - return Promise.reject(err); - }); - }).then(() => { - console.log('All tests pass!'); - resp.status(200).send('PASS'); - }).catch(err => { - console.log(`Some tests failed: ${err}`); - resp.status(500).send(`FAIL - details at https://${process.env.GCLOUD_PROJECT}.firebaseio.com/testRuns/${testId}`); + functions.logger.info("All tests pass!"); + resp.status(200).send("PASS \n"); + } catch (err) { + functions.logger.info(`Some tests failed: ${err}`, err); + resp + .status(500) + .send(`FAIL - details at ${functions.firebaseConfig().databaseURL}/testRuns/${testId}`); + } finally { + testIdRef.off("child_added"); + } }); -}); diff --git a/integration_test/functions/src/pubsub-tests.ts b/integration_test/functions/src/pubsub-tests.ts deleted file mode 100644 index 7919a23f7..000000000 --- a/integration_test/functions/src/pubsub-tests.ts +++ /dev/null @@ -1,42 +0,0 @@ -import * as functions from 'firebase-functions'; -import { TestSuite, expectEq, evaluate } from './testing'; -import PubsubMessage = functions.pubsub.Message; - -// TODO(inlined) use multiple queues to run inline. -// Expected message data: {"hello": "world"} -export const pubsubTests: any = functions.pubsub.topic('pubsubTests').onPublish((m, c) => { - let testId: string; - try { - testId = m.json.testId; - } catch (e) { - /* Ignored. Covered in another test case that `event.data.json` works. 
*/ - } - - return new TestSuite('pubsub onPublish') - .it('should have a topic as resource', (message, context) => expectEq( - context.resource.name, `projects/${process.env.GCLOUD_PROJECT}/topics/pubsubTests`)) - - .it('should not have a path', (message, context) => expectEq((context as any).path, undefined)) - - .it('should have the correct eventType', (message, context) => expectEq( - context.eventType, 'google.pubsub.topic.publish')) - - .it('should have an eventId', (message, context) => context.eventId) - - .it('should have a timestamp', (message, context) => context.timestamp) - - .it('should not have auth', (message, context) => expectEq((context as any).auth, undefined)) - - .it('should not have action', (message, context) => expectEq((context as any).action, undefined)) - - .it('should have pubsub data', (message) => { - const decoded = (new Buffer(message.data, 'base64')).toString(); - const parsed = JSON.parse(decoded); - return evaluate(parsed.hasOwnProperty('testId'), 'Raw data was: ' + message.data); - }) - - .it('should decode JSON payloads with the json helper', (message) => - evaluate(message.json.hasOwnProperty('testId'), message.json)) - - .run(testId, m, c); -}); diff --git a/integration_test/functions/src/region.ts b/integration_test/functions/src/region.ts new file mode 100644 index 000000000..4ce175234 --- /dev/null +++ b/integration_test/functions/src/region.ts @@ -0,0 +1,2 @@ +// TODO: Add back support for selecting region for integration test once params is ready. +export const REGION = "us-central1"; diff --git a/integration_test/functions/src/testing.ts b/integration_test/functions/src/testing.ts index 765dfacf0..156e94242 100644 --- a/integration_test/functions/src/testing.ts +++ b/integration_test/functions/src/testing.ts @@ -1,9 +1,10 @@ -import * as firebase from 'firebase-admin'; -import * as _ from 'lodash'; -import { EventContext } from 'firebase-functions'; +import * as firebase from "firebase-admin"; +import * as functions from "firebase-functions"; -export type TestCase = (data: T, context?: EventContext) => any -export type TestCaseMap = { [key: string]: TestCase }; +export type TestCase = (data: T, context?: functions.EventContext) => any; +export interface TestCaseMap { + [key: string]: TestCase; +} export class TestSuite { private name: string; @@ -19,37 +20,42 @@ export class TestSuite { return this; } - run(testId: string, data: T, context?: EventContext): Promise { - let running: Array> = []; - for (let testName in this.tests) { - if (!this.tests.hasOwnProperty(testName)) { continue; } + run(testId: string, data: T, context?: functions.EventContext): Promise { + const running: Array> = []; + for (const testName in this.tests) { + if (!this.tests.hasOwnProperty(testName)) { + continue; + } const run = Promise.resolve() .then(() => this.tests[testName](data, context)) .then( - (result) => { - console.log(`${result ? 'Passed' : 'Failed with successful op'}: ${testName}`); - return { name: testName, passed: !!result }; - }, - (error) => { - console.error(`Failed: ${testName}`, error); - return { name: testName, passed: 0, error: error }; - } + (result) => { + functions.logger.info( + `${result ? 
"Passed" : "Failed with successful op"}: ${testName}` + ); + return { name: testName, passed: !!result }; + }, + (error) => { + console.error(`Failed: ${testName}`, error); + return { name: testName, passed: 0, error }; + } ); running.push(run); } return Promise.all(running).then((results) => { let sum = 0; - results.forEach((val) => sum = sum + val.passed); + // eslint-disable-next-line @typescript-eslint/restrict-plus-operands + results.forEach((val) => (sum = sum + val.passed)); const summary = `passed ${sum} of ${running.length}`; const passed = sum === running.length; - console.log(summary); + functions.logger.info(summary); const result = { passed, summary, tests: results }; return firebase.database().ref(`testRuns/${testId}/${this.name}`).set(result); - }).then(() => null); + }); } } -function success() { +export function success() { return Promise.resolve().then(() => true); } @@ -57,40 +63,72 @@ function failure(reason: string) { return Promise.reject(reason); } -export function evaluate(value, errMsg) { +export function evaluate(value: boolean, errMsg: string) { if (value) { return success(); } return failure(errMsg); } -export function expectEq(left, right) { +export function expectEq(left: any, right: any) { return evaluate( left === right, - JSON.stringify(left) + ' does not equal ' + JSON.stringify(right)); + JSON.stringify(left) + " does not equal " + JSON.stringify(right) + ); } -export function expectDeepEq(left, right) { +function deepEq(left: any, right: any) { + if (left === right) { + return true; + } + + if (!(left instanceof Object && right instanceof Object)) { + return false; + } + + if (Object.keys(left).length !== Object.keys(right).length) { + return false; + } + + for (const key in left) { + if (Object.prototype.hasOwnProperty.call(left, key)) { + if (!Object.prototype.hasOwnProperty.call(right, key)) { + return false; + } + if (!deepEq(left[key], right[key])) { + return false; + } + } + } + + return true; +} + +export function expectDeepEq(left: any, right: any) { return evaluate( - _.isEqual(left, right), - JSON.stringify(left) + ' does not equal ' + JSON.stringify(right)); + deepEq(left, right), + `${JSON.stringify(left)} does not deep equal ${JSON.stringify(right)}` + ); } -export function expectMatches(input: string, regexp) { +export function expectMatches(input: string, regexp: RegExp) { return evaluate( - input.match(regexp), - "Input '" + input + "' did not match regexp '" + regexp + "'"); + input.match(regexp) !== null, + `Input '${input}' did not match regexp '${regexp}'` + ); } -export function expectReject(f) { - return function (event) { - return Promise.resolve() - .then(() => f(event)) - .then( - () => { - throw new Error('Test should have returned a rejected promise'); - }, - () => true, // A rejection is what we expected, and so is a positive result. 
- ); +export function expectReject(f: (e: EventType) => Promise) { + return async (event: EventType) => { + let rejected = false; + try { + await f(event); + } catch { + rejected = true; + } + + if (!rejected) { + throw new Error("Test should have returned a rejected promise"); + } }; } diff --git a/integration_test/functions/src/v1/auth-tests.ts b/integration_test/functions/src/v1/auth-tests.ts new file mode 100644 index 000000000..5d1b6188a --- /dev/null +++ b/integration_test/functions/src/v1/auth-tests.ts @@ -0,0 +1,65 @@ +import * as admin from "firebase-admin"; +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { expectEq, TestSuite } from "../testing"; +import UserMetadata = admin.auth.UserRecord; + +export const createUserTests: any = functions + .region(REGION) + .auth.user() + .onCreate((u, c) => { + const testId: string = u.displayName; + functions.logger.info(`testId is ${testId}`); + + return new TestSuite("auth user onCreate") + .it("should have a project as resource", (user, context) => + expectEq(context.resource.name, `projects/${process.env.GCLOUD_PROJECT}`) + ) + + .it("should not have a path", (user, context) => expectEq((context as any).path, undefined)) + + .it("should have the correct eventType", (user, context) => + expectEq(context.eventType, "google.firebase.auth.user.create") + ) + + .it("should have an eventId", (user, context) => context.eventId) + + .it("should have a timestamp", (user, context) => context.timestamp) + + .it("should not have auth", (user, context) => expectEq((context as any).auth, undefined)) + + .it("should not have action", (user, context) => expectEq((context as any).action, undefined)) + + .it("should have properly defined meta", (user) => user.metadata) + + .run(testId, u, c); + }); + +export const deleteUserTests: any = functions + .region(REGION) + .auth.user() + .onDelete((u, c) => { + const testId: string = u.displayName; + functions.logger.info(`testId is ${testId}`); + + return new TestSuite("auth user onDelete") + .it("should have a project as resource", (user, context) => + expectEq(context.resource.name, `projects/${process.env.GCLOUD_PROJECT}`) + ) + + .it("should not have a path", (user, context) => expectEq((context as any).path, undefined)) + + .it("should have the correct eventType", (user, context) => + expectEq(context.eventType, "google.firebase.auth.user.delete") + ) + + .it("should have an eventId", (user, context) => context.eventId) + + .it("should have a timestamp", (user, context) => context.timestamp) + + .it("should not have auth", (user, context) => expectEq((context as any).auth, undefined)) + + .it("should not have action", (user, context) => expectEq((context as any).action, undefined)) + + .run(testId, u, c); + }); diff --git a/integration_test/functions/src/v1/database-tests.ts b/integration_test/functions/src/v1/database-tests.ts new file mode 100644 index 000000000..df9d3cdd2 --- /dev/null +++ b/integration_test/functions/src/v1/database-tests.ts @@ -0,0 +1,75 @@ +import * as admin from "firebase-admin"; +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { expectEq, expectMatches, TestSuite } from "../testing"; +import DataSnapshot = admin.database.DataSnapshot; + +const testIdFieldName = "testId"; + +export const databaseTests: any = functions + .region(REGION) + .database.ref("dbTests/{testId}/start") + .onWrite((ch, ctx) => { + if (ch.after.val() === null) { + functions.logger.info( + `Event for 
${ctx.params[testIdFieldName]} is null; presuming data cleanup, so skipping.` + ); + return; + } + + return new TestSuite>("database ref onWrite") + + .it("should not have event.app", (change, context) => !(context as any).app) + + .it("should give refs access to admin data", (change) => + change.after.ref.parent + .child("adminOnly") + .update({ allowed: 1 }) + .then(() => true) + ) + + .it("should have a correct ref url", (change) => { + const url = change.after.ref.toString(); + return Promise.resolve() + .then(() => { + return expectMatches( + url, + new RegExp( + `^https://${process.env.GCLOUD_PROJECT}(-default-rtdb)*.firebaseio.com/dbTests` + ) + ); + }) + .then(() => { + return expectMatches(url, /\/start$/); + }); + }) + + .it("should have refs resources", (change, context) => + expectMatches( + context.resource.name, + new RegExp( + `^projects/_/instances/${process.env.GCLOUD_PROJECT}(-default-rtdb)*/refs/dbTests/${context.params.testId}/start$` + ) + ) + ) + + .it("should not include path", (change, context) => + expectEq((context as any).path, undefined) + ) + + .it("should have the right eventType", (change, context) => + expectEq(context.eventType, "google.firebase.database.ref.write") + ) + + .it("should have eventId", (change, context) => context.eventId) + + .it("should have timestamp", (change, context) => context.timestamp) + + .it("should not have action", (change, context) => + expectEq((context as any).action, undefined) + ) + + .it("should have admin authType", (change, context) => expectEq(context.authType, "ADMIN")) + + .run(ctx.params[testIdFieldName], ch, ctx); + }); diff --git a/integration_test/functions/src/v1/firestore-tests.ts b/integration_test/functions/src/v1/firestore-tests.ts new file mode 100644 index 000000000..b986ca06a --- /dev/null +++ b/integration_test/functions/src/v1/firestore-tests.ts @@ -0,0 +1,44 @@ +import * as admin from "firebase-admin"; +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { expectDeepEq, expectEq, TestSuite } from "../testing"; +import DocumentSnapshot = admin.firestore.DocumentSnapshot; + +const testIdFieldName = "documentId"; + +export const firestoreTests: any = functions + .runWith({ + timeoutSeconds: 540, + }) + .region(REGION) + .firestore.document("tests/{documentId}") + .onCreate((s, c) => { + return new TestSuite("firestore document onWrite") + + .it("should not have event.app", (snap, context) => !(context as any).app) + + .it("should give refs write access", (snap) => + snap.ref.set({ allowed: 1 }, { merge: true }).then(() => true) + ) + + .it("should have well-formatted resource", (snap, context) => + expectEq( + context.resource.name, + `projects/${process.env.GCLOUD_PROJECT}/databases/(default)/documents/tests/${context.params.documentId}` + ) + ) + + .it("should have the right eventType", (snap, context) => + expectEq(context.eventType, "google.firestore.document.create") + ) + + .it("should have eventId", (snap, context) => context.eventId) + + .it("should have timestamp", (snap, context) => context.timestamp) + + .it("should have the correct data", (snap, context) => + expectDeepEq(snap.data(), { test: context.params.documentId }) + ) + + .run(c.params[testIdFieldName], s, c); + }); diff --git a/integration_test/functions/src/v1/https-tests.ts b/integration_test/functions/src/v1/https-tests.ts new file mode 100644 index 000000000..5a74a1903 --- /dev/null +++ b/integration_test/functions/src/v1/https-tests.ts @@ -0,0 +1,12 @@ +import * as functions from 
"firebase-functions"; +import { REGION } from "../region"; +import { expectEq, TestSuite } from "../testing"; + +export const callableTests: any = functions + .runWith({ invoker: "private" }) + .region(REGION) + .https.onCall((d) => { + return new TestSuite("https onCall") + .it("should have the correct data", (data: any) => expectEq(data?.foo, "bar")) + .run(d.testId, d); + }); diff --git a/integration_test/functions/src/v1/index.ts b/integration_test/functions/src/v1/index.ts new file mode 100644 index 000000000..0a1a2a35f --- /dev/null +++ b/integration_test/functions/src/v1/index.ts @@ -0,0 +1,9 @@ +export * from "./pubsub-tests"; +export * from "./database-tests"; +export * from "./auth-tests"; +export * from "./firestore-tests"; +// Temporarily disable http test - will not work unless running on projects w/ permission to create public functions. +// export * from "./https-tests"; +export * from "./remoteConfig-tests"; +export * from "./storage-tests"; +export * from "./testLab-tests"; diff --git a/integration_test/functions/src/v1/pubsub-tests.ts b/integration_test/functions/src/v1/pubsub-tests.ts new file mode 100644 index 000000000..866e3218d --- /dev/null +++ b/integration_test/functions/src/v1/pubsub-tests.ts @@ -0,0 +1,67 @@ +import * as admin from "firebase-admin"; +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { evaluate, expectEq, success, TestSuite } from "../testing"; +import PubsubMessage = functions.pubsub.Message; + +// TODO(inlined) use multiple queues to run inline. +// Expected message data: {"hello": "world"} +export const pubsubTests: any = functions + .region(REGION) + .pubsub.topic("pubsubTests") + .onPublish((m, c) => { + let testId: string; + try { + testId = m.json.testId; + } catch (_e) { + // Ignored. Covered in another test case that `event.data.json` works. + } + + return new TestSuite("pubsub onPublish") + .it("should have a topic as resource", (message, context) => + expectEq(context.resource.name, `projects/${process.env.GCLOUD_PROJECT}/topics/pubsubTests`) + ) + + .it("should not have a path", (message, context) => + expectEq((context as any).path, undefined) + ) + + .it("should have the correct eventType", (message, context) => + expectEq(context.eventType, "google.pubsub.topic.publish") + ) + + .it("should have an eventId", (message, context) => context.eventId) + + .it("should have a timestamp", (message, context) => context.timestamp) + + .it("should not have auth", (message, context) => expectEq((context as any).auth, undefined)) + + .it("should not have action", (message, context) => + expectEq((context as any).action, undefined) + ) + + .it("should have pubsub data", (message) => { + const decoded = new Buffer(message.data, "base64").toString(); + const parsed = JSON.parse(decoded); + return evaluate(parsed.hasOwnProperty("testId"), `Raw data was + ${message.data}`); + }) + + .it("should decode JSON payloads with the json helper", (message) => + evaluate(message.json.hasOwnProperty("testId"), message.json) + ) + + .run(testId, m, c); + }); + +export const schedule: any = functions + .region(REGION) + .pubsub.schedule("every 10 hours") // This is a dummy schedule, since we need to put a valid one in. 
+ // For the test, the job is triggered by the jobs:run api + .onRun(async () => { + const db = admin.database(); + const snap = await db.ref("testRuns").orderByChild("timestamp").limitToLast(1).once("value"); + const testId = Object.keys(snap.val())[0]; + return new TestSuite("pubsub scheduleOnRun") + .it("should trigger when the scheduler fires", () => success()) + .run(testId, null); + }); diff --git a/integration_test/functions/src/v1/remoteConfig-tests.ts b/integration_test/functions/src/v1/remoteConfig-tests.ts new file mode 100644 index 000000000..416621774 --- /dev/null +++ b/integration_test/functions/src/v1/remoteConfig-tests.ts @@ -0,0 +1,23 @@ +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { expectEq, TestSuite } from "../testing"; +import TemplateVersion = functions.remoteConfig.TemplateVersion; + +export const remoteConfigTests: any = functions.region(REGION).remoteConfig.onUpdate((v, c) => { + return new TestSuite("remoteConfig onUpdate") + .it("should have a project as resource", (version, context) => + expectEq(context.resource.name, `projects/${process.env.GCLOUD_PROJECT}`) + ) + + .it("should have the correct eventType", (version, context) => + expectEq(context.eventType, "google.firebase.remoteconfig.update") + ) + + .it("should have an eventId", (version, context) => context.eventId) + + .it("should have a timestamp", (version, context) => context.timestamp) + + .it("should not have auth", (version, context) => expectEq((context as any).auth, undefined)) + + .run(v.description, v, c); +}); diff --git a/integration_test/functions/src/v1/storage-tests.ts b/integration_test/functions/src/v1/storage-tests.ts new file mode 100644 index 000000000..6819c7a2a --- /dev/null +++ b/integration_test/functions/src/v1/storage-tests.ts @@ -0,0 +1,28 @@ +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { expectEq, TestSuite } from "../testing"; +import ObjectMetadata = functions.storage.ObjectMetadata; + +export const storageTests: any = functions + .runWith({ + timeoutSeconds: 540, + }) + .region(REGION) + .storage.bucket() + .object() + .onFinalize((s, c) => { + const testId = s.name.split(".")[0]; + return new TestSuite("storage object finalize") + + .it("should not have event.app", (data, context) => !(context as any).app) + + .it("should have the right eventType", (snap, context) => + expectEq(context.eventType, "google.storage.object.finalize") + ) + + .it("should have eventId", (snap, context) => context.eventId) + + .it("should have timestamp", (snap, context) => context.timestamp) + + .run(testId, s, c); + }); diff --git a/integration_test/functions/src/v1/testLab-tests.ts b/integration_test/functions/src/v1/testLab-tests.ts new file mode 100644 index 000000000..242cd21f6 --- /dev/null +++ b/integration_test/functions/src/v1/testLab-tests.ts @@ -0,0 +1,23 @@ +import * as functions from "firebase-functions"; +import { REGION } from "../region"; +import { expectEq, TestSuite } from "../testing"; +import TestMatrix = functions.testLab.TestMatrix; + +export const testLabTests: any = functions + .runWith({ + timeoutSeconds: 540, + }) + .region(REGION) + .testLab.testMatrix() + .onComplete((matrix, context) => { + return new TestSuite("test matrix complete") + .it("should have eventId", (snap, context) => context.eventId) + + .it("should have right eventType", (_, context) => + expectEq(context.eventType, "google.testing.testMatrix.complete") + ) + + .it("should be in state 'INVALID'", 
(matrix) => expectEq(matrix.state, "INVALID")) + + .run(matrix?.clientInfo?.details?.testId, matrix, context); + }); diff --git a/integration_test/functions/src/v1/testLab-utils.ts b/integration_test/functions/src/v1/testLab-utils.ts new file mode 100644 index 000000000..7ba32e112 --- /dev/null +++ b/integration_test/functions/src/v1/testLab-utils.ts @@ -0,0 +1,112 @@ +import * as admin from "firebase-admin"; +import fetch from "node-fetch"; + +interface AndroidDevice { + androidModelId: string; + androidVersionId: string; + locale: string; + orientation: string; +} + +const TESTING_API_SERVICE_NAME = "testing.googleapis.com"; + +/** + * Creates a new TestMatrix in Test Lab which is expected to be rejected as + * invalid. + * + * @param projectId Project for which the test run will be created + * @param testId Test id which will be encoded in client info details + * @param accessToken accessToken to attach to requested for authentication + */ +export async function startTestRun(projectId: string, testId: string, accessToken: string) { + const device = await fetchDefaultDevice(accessToken); + return await createTestMatrix(accessToken, projectId, testId, device); +} + +async function fetchDefaultDevice(accessToken: string): Promise { + const resp = await fetch( + `https://${TESTING_API_SERVICE_NAME}/v1/testEnvironmentCatalog/ANDROID`, + { + headers: { + Authorization: "Bearer " + accessToken, + "Content-Type": "application/json", + }, + } + ); + if (!resp.ok) { + throw new Error(resp.statusText); + } + const data = await resp.json(); + const models = data?.androidDeviceCatalog?.models || []; + const defaultModels = models.filter( + (m) => + m.tags !== undefined && + m.tags.indexOf("default") > -1 && + m.supportedVersionIds !== undefined && + m.supportedVersionIds.length > 0 + ); + + if (defaultModels.length === 0) { + throw new Error("No default device found"); + } + + const model = defaultModels[0]; + const versions = model.supportedVersionIds; + + return { + androidModelId: model.id, + androidVersionId: versions[versions.length - 1], + locale: "en", + orientation: "portrait", + } as AndroidDevice; +} + +async function createTestMatrix( + accessToken: string, + projectId: string, + testId: string, + device: AndroidDevice +): Promise { + const body = { + projectId, + testSpecification: { + androidRoboTest: { + appApk: { + gcsPath: "gs://path/to/non-existing-app.apk", + }, + }, + }, + environmentMatrix: { + androidDeviceList: { + androidDevices: [device], + }, + }, + resultStorage: { + googleCloudStorage: { + gcsPath: "gs://" + admin.storage().bucket().name, + }, + }, + clientInfo: { + name: "CloudFunctionsSDKIntegrationTest", + clientInfoDetails: { + key: "testId", + value: testId, + }, + }, + }; + const resp = await fetch( + `https://${TESTING_API_SERVICE_NAME}/v1/projects/${projectId}/testMatrices`, + { + method: "POST", + headers: { + Authorization: "Bearer " + accessToken, + "Content-Type": "application/json", + }, + body: JSON.stringify(body), + } + ); + if (!resp.ok) { + throw new Error(resp.statusText); + } + return; +} diff --git a/integration_test/functions/src/v2/https-tests.ts b/integration_test/functions/src/v2/https-tests.ts new file mode 100644 index 000000000..b787ac602 --- /dev/null +++ b/integration_test/functions/src/v2/https-tests.ts @@ -0,0 +1,8 @@ +import { onCall } from "firebase-functions/v2/https"; +import { expectEq, TestSuite } from "../testing"; + +export const callabletests = onCall({ invoker: "private" }, (req) => { + return new TestSuite("v2 https onCall") + 
.it("should have the correct data", (data: any) => expectEq(data?.foo, "bar")) + .run(req.data.testId, req.data); +}); diff --git a/integration_test/functions/src/v2/index.ts b/integration_test/functions/src/v2/index.ts new file mode 100644 index 000000000..38cde5f92 --- /dev/null +++ b/integration_test/functions/src/v2/index.ts @@ -0,0 +1,7 @@ +import { setGlobalOptions } from "firebase-functions/v2"; +import { REGION } from "../region"; +setGlobalOptions({ region: REGION }); + +// TODO: Temporarily disable - doesn't work unless running on projects w/ permission to create public functions. +// export * from './https-tests'; +export * from "./scheduled-tests"; diff --git a/integration_test/functions/src/v2/scheduled-tests.ts b/integration_test/functions/src/v2/scheduled-tests.ts new file mode 100644 index 000000000..cc13bed62 --- /dev/null +++ b/integration_test/functions/src/v2/scheduled-tests.ts @@ -0,0 +1,19 @@ +import * as admin from "firebase-admin"; +import { onSchedule } from "firebase-functions/v2/scheduler"; +import { REGION } from "../region"; +import { success, TestSuite } from "../testing"; + +export const schedule: any = onSchedule( + { + schedule: "every 10 hours", + region: REGION, + }, + async () => { + const db = admin.database(); + const snap = await db.ref("testRuns").orderByChild("timestamp").limitToLast(1).once("value"); + const testId = Object.keys(snap.val())[0]; + return new TestSuite("scheduler scheduleOnRun") + .it("should trigger when the scheduler fires", () => success()) + .run(testId, null); + } +); diff --git a/integration_test/functions/tsconfig.json b/integration_test/functions/tsconfig.json index 554bd3a6b..77fb279d5 100644 --- a/integration_test/functions/tsconfig.json +++ b/integration_test/functions/tsconfig.json @@ -1,16 +1,12 @@ { "compilerOptions": { - "lib": ["es6"], + "lib": ["es6", "dom"], "module": "commonjs", - "target": "es6", + "target": "es2020", "noImplicitAny": false, "outDir": "lib", "declaration": true, - "typeRoots": [ - "node_modules/@types" - ] + "typeRoots": ["node_modules/@types"] }, - "files": [ - "src/index.ts" - ] + "files": ["src/index.ts"] } diff --git a/integration_test/functions/package.json b/integration_test/package.json.template similarity index 50% rename from integration_test/functions/package.json rename to integration_test/package.json.template index 746d9526d..42cdf121c 100644 --- a/integration_test/functions/package.json +++ b/integration_test/package.json.template @@ -5,16 +5,18 @@ "build": "./node_modules/.bin/tsc" }, "dependencies": { - "@google-cloud/pubsub": "^0.6.0", - "@types/lodash": "^4.14.41", - "firebase": "^4.9.1", - "firebase-admin": "~5.12.1", - "firebase-functions": "./firebase-functions.tgz", - "lodash": "^4.17.2" + "@google-cloud/pubsub": "^2.10.0", + "firebase-admin": "__FIREBASE_ADMIN__", + "firebase-functions": "__SDK_TARBALL__", + "node-fetch": "^2.6.7" }, "main": "lib/index.js", "devDependencies": { - "typescript": "~2.8.3" + "@types/node-fetch": "^2.6.1", + "typescript": "^4.3.5" + }, + "engines": { + "node": "__NODE_VERSION__" }, "private": true } diff --git a/integration_test/run_tests.sh b/integration_test/run_tests.sh index d37bb49b8..681d2dc1e 100755 --- a/integration_test/run_tests.sh +++ b/integration_test/run_tests.sh @@ -3,16 +3,13 @@ # Exit immediately if a command exits with a non-zero status. set -e -function usage { - echo "Usage: $0 " - exit 1 -} +PROJECT_ID="${GCLOUD_PROJECT}" +TIMESTAMP=$(date +%s) -# The first parameter is required and is the Firebase project id. 
-if [[ $1 == "" ]]; then - usage +if [[ "${PROJECT_ID}" == "" ]]; then + echo "process.env.GCLOUD_PROJECT cannot be empty" + exit 1 fi -PROJECT_ID=$1 # Directory where this script lives. DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" @@ -23,66 +20,86 @@ function announce { function build_sdk { announce "Building SDK..." - cd $DIR/.. + cd "${DIR}/.." rm -f firebase-functions-*.tgz npm run build:pack - mv firebase-functions-*.tgz integration_test/functions/firebase-functions.tgz + mv firebase-functions-*.tgz "integration_test/functions/firebase-functions-${TIMESTAMP}.tgz" +} + +# Creates a Package.json from package.json.template +# @param timestmap of the current SDK build +# @param Node version to test under +function create_package_json { + cd "${DIR}" + cp package.json.template functions/package.json + # we have to do the -e flag here so that it work both on linux and mac os, but that creates an extra + # backup file called package.json-e that we should clean up afterwards. + sed -i -e "s/__SDK_TARBALL__/firebase-functions-$1.tgz/g" functions/package.json + sed -i -e "s/__NODE_VERSION__/$2/g" functions/package.json + sed -i -e "s/__FIREBASE_ADMIN__/$3/g" functions/package.json + rm -f functions/package.json-e } function install_deps { announce "Installing dependencies..." - cd $DIR/functions + cd "${DIR}/functions" + rm -rf node_modules/firebase-functions npm install } function delete_all_functions { - announce "Deploying empty index.js to project..." - cd $DIR - ./functions/node_modules/.bin/tsc -p functions/ # Make sure the functions/lib directory actually exists. - echo "" > functions/lib/index.js - firebase deploy --project=$PROJECT_ID --only functions + announce "Deleting all functions in project..." + cd "${DIR}" + # Try to delete, if there are errors it is because the project is already empty, + # in that case do nothing. + firebase functions:delete integrationTests v1 v2 --force --project=$PROJECT_ID || : & + wait announce "Project emptied." } function deploy { - announce "Deploying functions..." - cd $DIR - ./functions/node_modules/.bin/tsc -p functions/ - # Deploy functions, and security rules for database and Firestore - firebase deploy --project=$PROJECT_ID --only functions,database,firestore + # Deploy functions, and security rules for database and Firestore. If the deploy fails, retry twice + for i in 1 2; do firebase deploy --project="${PROJECT_ID}" --only functions,database,firestore && break; done } function run_tests { - announce "Running the integration tests..." + announce "Running integration tests..." # Construct the URL for the test function. This may change in the future, # causing this script to start failing, but currently we don't have a very # reliable way of determining the URL dynamically. TEST_DOMAIN="cloudfunctions.net" - if [[ $FIREBASE_FUNCTIONS_URL == "https://preprod-cloudfunctions.sandbox.googleapis.com" ]]; then - TEST_DOMAIN="txcloud.net" + if [[ "${FIREBASE_FUNCTIONS_TEST_REGION}" == "" ]]; then + FIREBASE_FUNCTIONS_TEST_REGION="us-central1" fi - TEST_URL="https://us-central1-$PROJECT_ID.$TEST_DOMAIN/integrationTests" - echo $TEST_URL + TEST_URL="https://${FIREBASE_FUNCTIONS_TEST_REGION}-${PROJECT_ID}.${TEST_DOMAIN}/integrationTests" + echo "${TEST_URL}" - curl --fail $TEST_URL + curl --fail -H "Authorization: Bearer $(gcloud auth print-identity-token)" "${TEST_URL}" } function cleanup { announce "Performing cleanup..." 
delete_all_functions - rm $DIR/functions/firebase-functions.tgz - rm -f $DIR/functions/firebase-debug.log - rm -rf $DIR/functions/node_modules/firebase-functions + rm "${DIR}/functions/firebase-functions-${TIMESTAMP}.tgz" + rm "${DIR}/functions/package.json" + rm -f "${DIR}/functions/firebase-debug.log" + rm -rf "${DIR}/functions/lib" + rm -rf "${DIR}/functions/node_modules" } +# Setup build_sdk -install_deps delete_all_functions -deploy -run_tests -announce "Re-deploying the same functions to make sure updates work..." -deploy -run_tests + +for version in 14 16; do + create_package_json $TIMESTAMP $version "^10.0.0" + install_deps + announce "Re-deploying the same functions to Node $version runtime ..." + deploy + run_tests +done + +# Cleanup cleanup announce "All tests pass!" diff --git a/spec/testing.spec.ts b/logger/compat.js similarity index 75% rename from spec/testing.spec.ts rename to logger/compat.js index 83254e881..7d725acc3 100644 --- a/spec/testing.spec.ts +++ b/logger/compat.js @@ -1,6 +1,6 @@ // The MIT License (MIT) // -// Copyright (c) 2017 Firebase +// Copyright (c) 2021 Firebase // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,13 +20,7 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. -import { expect } from 'chai'; - -import * as testing from '../src/testing'; - -// TODO(rjh): As actual testing methods become available, replace this with actual tests. -describe('testing', () => { - it('should be accessible through the entrypoint', function () { - expect(testing.whereAreTheBugs()).to.not.equal('Earth'); - }); -}); +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/logger/index.js b/logger/index.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/logger/index.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/mocha/setup.ts b/mocha/setup.ts new file mode 100644 index 000000000..5a7b64c80 --- /dev/null +++ b/mocha/setup.ts @@ -0,0 +1,6 @@ +import chai from "chai"; +import chaiAsPromised from "chai-as-promised"; +import nock from "nock"; + +chai.use(chaiAsPromised); +nock.disableNetConnect(); diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..1f231672c --- /dev/null +++ b/package-lock.json @@ -0,0 +1,9854 @@ +{ + "name": "firebase-functions", + "version": "7.0.0-rc.2", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "firebase-functions", + "version": "7.0.0-rc.2", + "license": "MIT", + "dependencies": { + "@types/cors": "^2.8.5", + "@types/express": "^4.17.21", + "cors": "^2.8.5", + "express": "^4.21.0", + "protobufjs": "^7.2.2" + }, + "bin": { + "firebase-functions": "lib/bin/firebase-functions.js" + }, + "devDependencies": { + "@eslint/eslintrc": "^3.3.1", + "@firebase/api-documenter": "^0.2.0", + "@microsoft/api-documenter": "^7.13.45", + "@microsoft/api-extractor": "^7.18.7", + "@types/chai": "^4.1.7", + "@types/chai-as-promised": "^7.1.0", + "@types/jsonwebtoken": "^9.0.0", + "@types/mocha": "^5.2.7", + "@types/mock-require": "^2.0.0", + "@types/nock": "^10.0.3", + "@types/node": "^18.0.0", + "@types/node-fetch": "^3.0.3", + "@types/sinon": "^9.0.11", + "@typescript-eslint/eslint-plugin": "^8.46.2", + "@typescript-eslint/parser": "^8.46.2", + "api-extractor-model-me": "^0.1.1", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "child-process-promise": "^2.2.1", + "eslint": "^9.38.0", + "eslint-config-google": "^0.14.0", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-jsdoc": "^61.1.9", + "eslint-plugin-prettier": "^4.2.1", + "firebase-admin": "^13.0.0", + "genkit": "^1.0.0-rc.4", + "jsdom": "^16.2.1", + "jsonwebtoken": "^9.0.0", + "jwk-to-pem": "^2.0.5", + "mocha": "^10.2.0", + "mock-require": "^3.0.3", + "mz": "^2.7.0", + "nock": "^13.2.9", + "node-fetch": "^2.6.7", + "portfinder": "^1.0.28", + "prettier": "^2.8.8", + "protobufjs-cli": "^1.1.1", + "semver": "^7.3.5", + "sinon": "^9.2.4", + "ts-node": "^10.4.0", + "tsdown": "^0.15.11", + "typescript": "^5.9.3", + "yaml": "^2.8.1", + "yargs": "^15.3.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "firebase-admin": "^11.10.0 || ^12.0.0 || ^13.0.0" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/generator/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.5" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@emnapi/core": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.6.0.tgz", + "integrity": "sha512-zq/ay+9fNIJJtJiZxdTnXS20PllcYMX3OE23ESc4HK/bdYu3cOWYVhsOhVnXALfU/uqJIxn5NBPd9z4v+SfoSg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.6.0.tgz", + "integrity": "sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@es-joy/jsdoccomment": { + "version": "0.76.0", + "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.76.0.tgz", + "integrity": "sha512-g+RihtzFgGTx2WYCuTHbdOXJeAlGnROws0TeALx9ow/ZmOROOZkVg5wp/B44n0WJgI4SQFP1eWM2iRPlU2Y14w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.8", + "@typescript-eslint/types": "^8.46.0", + "comment-parser": "1.4.1", + "esquery": "^1.6.0", + "jsdoc-type-pratt-parser": 
"~6.10.0" + }, + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@es-joy/resolve.exports": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@es-joy/resolve.exports/-/resolve.exports-1.0.0.tgz", + "integrity": "sha512-bbrmzsAZ9GA/3oBS6r8PWMtZarEhKHr413hak8ArwMEZ5DtaLErnkcyEWUsXy7urBcmVu/TpDzHPDVM5uIbx9A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.1.tgz", + "integrity": "sha512-csZAzkNhsgwb0I/UAV6/RGFTbiakPCf0ZrGmrIxQpYvGZ00PhTkSnyKNolphgIvmnJeGw6rcGVEXfTzUnFuEvw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz", + "integrity": "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + 
"js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@eslint/eslintrc/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/@eslint/eslintrc/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "9.38.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.38.0.tgz", + "integrity": "sha512-UZ1VpFvXf9J06YG9xQBdnzU+kthors6KjhMAl6f4gH4usHyh31rUf2DLGInT8RFYIReYXNSydgPY0V2LuWgl7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + 
"node_modules/@eslint/object-schema": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.0.tgz", + "integrity": "sha512-sB5uyeq+dwCWyPi31B2gQlVlo+j5brPlWx4yZBrEaRo/nhdDE8Xke1gsGgtiBdaBTxuTkceLVuVt/pclrasb0A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.1.1.tgz", + "integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==", + "dev": true + }, + "node_modules/@firebase/api-documenter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@firebase/api-documenter/-/api-documenter-0.2.0.tgz", + "integrity": "sha512-WQcOP5TvtRWMfGkpJpKpyVDjcB2UYCZWFmQm/nXUYUdI6PZ/Im1yb2YydgpnSlhrZxz6C1YkYFGLYCrltks1Yw==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.12.24", + "@rushstack/node-core-library": "3.45.5", + "@rushstack/ts-command-line": "4.11.0", + "api-extractor-model-me": "0.1.1", + "colors": "~1.4.0", + "js-yaml": "4.1.0", + "resolve": "~1.22.0", + "tslib": "^2.1.0" + }, + "bin": { + "api-documenter-fire": "dist/start.js" + } + }, + "node_modules/@firebase/api-documenter/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/@firebase/api-documenter/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@firebase/app-check-interop-types": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.3.tgz", + "integrity": "sha512-gAlxfPLT2j8bTI/qfe3ahl2I2YcBQ8cFIBdhAQA4I2f3TndcO+22YizyGYuttLHPQEpWkhmpFW60VCFEPg4g5A==", + "dev": true + }, + "node_modules/@firebase/app-types": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.3.tgz", + "integrity": "sha512-kRVpIl4vVGJ4baogMDINbyrIOtOxqhkZQg4jTq3l8Lw6WSk0xfpEYzezFu+Kl4ve4fbPl79dvwRtaFqAC/ucCw==", + "dev": true + }, + "node_modules/@firebase/auth-interop-types": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.4.tgz", + "integrity": "sha512-JPgcXKCuO+CWqGDnigBtvo09HeBs5u/Ktc2GaFj2m01hLarbxthLNm7Fk8iOP1aqAtXV+fnnGj7U28xmk7IwVA==", + "dev": true + }, + "node_modules/@firebase/component": { + "version": "0.6.12", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.12.tgz", + "integrity": 
"sha512-YnxqjtohLbnb7raXt2YuA44cC1wA9GiehM/cmxrsoxKlFxBLy2V0OkRSj9gpngAE0UoJ421Wlav9ycO7lTPAUw==", + "dev": true, + "dependencies": { + "@firebase/util": "1.10.3", + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@firebase/database": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.11.tgz", + "integrity": "sha512-gLrw/XeioswWUXgpVKCPAzzoOuvYNqK5fRUeiJTzO7Mlp9P6ylFEyPJlRBl1djqYye641r3MX6AmIeMXwjgwuQ==", + "dev": true, + "dependencies": { + "@firebase/app-check-interop-types": "0.3.3", + "@firebase/auth-interop-types": "0.2.4", + "@firebase/component": "0.6.12", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.3", + "faye-websocket": "0.11.4", + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@firebase/database-compat": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-2.0.2.tgz", + "integrity": "sha512-5zvdnMsfDHvrQAVM6jBS7CkBpu+z3YbpFdhxRsrK1FP45IEfxlzpeuEUb17D/tpM10vfq4Ok0x5akIBaCv7gfA==", + "dev": true, + "dependencies": { + "@firebase/component": "0.6.12", + "@firebase/database": "1.0.11", + "@firebase/database-types": "1.0.8", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.3", + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@firebase/database-types": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.8.tgz", + "integrity": "sha512-6lPWIGeufhUq1heofZULyVvWFhD01TUrkkB9vyhmksjZ4XF7NaivQp9rICMk7QNhqwa+uDCaj4j+Q8qqcSVZ9g==", + "dev": true, + "dependencies": { + "@firebase/app-types": "0.9.3", + "@firebase/util": "1.10.3" + } + }, + "node_modules/@firebase/logger": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.4.tgz", + "integrity": "sha512-mH0PEh1zoXGnaR8gD1DeGeNZtWFKbnz9hDO91dIml3iou1gpOnLqXQ2dJfB71dj6dpmUjcQ6phY3ZZJbjErr9g==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@firebase/util": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.10.3.tgz", + "integrity": "sha512-wfoF5LTy0m2ufUapV0ZnpcGQvuavTbJ5Qr1Ze9OJGL70cSMvhDyjS4w2121XdA3lGZSTOsDOyGhpoDtYwck85A==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@genkit-ai/ai": { + "version": "1.0.0-rc.4", + "resolved": "https://registry.npmjs.org/@genkit-ai/ai/-/ai-1.0.0-rc.4.tgz", + "integrity": "sha512-EpO4DOmgwJeoHrUU3LQnpLUdD+KR6cXyQB9RVvKu8fENDLvYQAlmW6sJiqRI0YekKyrHyrfsMdkXsuuFQIojSA==", + "dev": true, + "dependencies": { + "@genkit-ai/core": "1.0.0-rc.4", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.11.19", + "colorette": "^2.0.20", + "json5": "^2.2.3", + "node-fetch": "^3.3.2", + "partial-json": "^0.1.7", + "uuid": "^10.0.0" + } + }, + "node_modules/@genkit-ai/ai/node_modules/@types/node": { + "version": "20.17.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.14.tgz", + "integrity": "sha512-w6qdYetNL5KRBiSClK/KWai+2IMEJuAj+EujKCumalFOwXtvOXaEan9AuwcRID2IcOIAWSIfR495hBtgKlx2zg==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@genkit-ai/ai/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": 
"sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dev": true, + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/@genkit-ai/ai/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@genkit-ai/core": { + "version": "1.0.0-rc.4", + "resolved": "https://registry.npmjs.org/@genkit-ai/core/-/core-1.0.0-rc.4.tgz", + "integrity": "sha512-wOxbWkaYHvkR4mB3AsKN32fC5qZg18IlurhHLYSP/WFcGkAbUFkLniiFoxfzlsfkwd8TYsrJpHJWdKbkJ5WixA==", + "dev": true, + "dependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/context-async-hooks": "^1.25.0", + "@opentelemetry/core": "^1.25.0", + "@opentelemetry/sdk-metrics": "^1.25.0", + "@opentelemetry/sdk-node": "^0.52.0", + "@opentelemetry/sdk-trace-base": "^1.25.0", + "@types/json-schema": "^7.0.15", + "ajv": "^8.12.0", + "ajv-formats": "^3.0.1", + "async-mutex": "^0.5.0", + "body-parser": "^1.20.3", + "cors": "^2.8.5", + "express": "^4.21.0", + "get-port": "^5.1.0", + "json-schema": "^0.4.0", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.22.4" + } + }, + "node_modules/@genkit-ai/dotprompt": { + "version": "1.0.0-rc.4", + "resolved": "https://registry.npmjs.org/@genkit-ai/dotprompt/-/dotprompt-1.0.0-rc.4.tgz", + "integrity": "sha512-hzRg/+427FF6El9ByATl4ep2eVz1vuZ1hebLfnNpK0A+vM8ZBq6pkNKDtO8rT+Wy/A5TzJeWvGZ34knDsjFAWA==", + "dev": true, + "dependencies": { + "@genkit-ai/ai": "1.0.0-rc.4", + "@genkit-ai/core": "1.0.0-rc.4", + "front-matter": "^4.0.2", + "handlebars": "^4.7.8", + "node-fetch": "^3.3.2" + } + }, + "node_modules/@genkit-ai/dotprompt/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dev": true, + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/@google-cloud/firestore": { + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.0.tgz", + "integrity": "sha512-88uZ+jLsp1aVMj7gh3EKYH1aulTAMFAp8sH/v5a9w8q8iqSG27RiWLoxSAFr/XocZ9hGiWH1kEnBw+zl3xAgNA==", + "dev": true, + "optional": true, + "dependencies": { + "@opentelemetry/api": "^1.3.0", + "fast-deep-equal": "^3.1.1", + "functional-red-black-tree": "^1.0.1", + "google-gax": "^4.3.3", + "protobufjs": "^7.2.6" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/paginator": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz", + "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==", + "dev": true, + 
"optional": true, + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage": { + "version": "7.15.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.15.0.tgz", + "integrity": "sha512-/j/+8DFuEOo33fbdX0V5wjooOoFahEaMEdImHBmM2tH9MPHJYNtmXOf2sGUmZmiufSukmBEvdlzYgDkkgeBiVQ==", + "dev": true, + "optional": true, + "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "duplexify": "^4.1.3", + "fast-xml-parser": "^4.4.1", + "gaxios": "^6.0.2", + "google-auth-library": "^9.6.3", + "html-entities": "^2.5.2", + "mime": "^3.0.0", + "p-limit": "^3.0.1", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@grpc/grpc-js": { + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", + "dev": true, + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "dev": true, + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@grpc/proto-loader/node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@grpc/proto-loader/node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": 
"sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/@grpc/proto-loader/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@grpc/proto-loader/node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@grpc/proto-loader/node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@grpc/proto-loader/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + 
"node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/gen-mapping/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@jsdoc/salty": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@jsdoc/salty/-/salty-0.2.9.tgz", + "integrity": "sha512-yYxMVH7Dqw6nO0d5NIV8OQWnitU8k6vXH8NtgqAfIa/IUqRMxRv/NUJJ08VEKbAakwxlgBl5PJdrU0dMPStsnw==", + "dev": true, + "dependencies": { + "lodash": "^4.17.21" + }, + "engines": { + "node": ">=v12.0.0" + } + }, + "node_modules/@microsoft/api-documenter": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@microsoft/api-documenter/-/api-documenter-7.26.5.tgz", + "integrity": "sha512-E1V8FIHd1ePefbvCZoQfusBPMyKqIq/VqgfJGeZKjOYluwQMlZEgJT18t0XH8zPMO5/rB/PWAVkv4fKrsnoYjw==", + "dev": true, + "dependencies": { + "@microsoft/api-extractor-model": "7.30.2", + "@microsoft/tsdoc": "~0.15.1", + "@rushstack/node-core-library": "5.10.2", + "@rushstack/terminal": "0.14.5", + "@rushstack/ts-command-line": "4.23.3", + "js-yaml": "~3.13.1", + "resolve": "~1.22.1" + }, + "bin": { + "api-documenter": "bin/api-documenter" + } + }, + "node_modules/@microsoft/api-documenter/node_modules/@microsoft/tsdoc": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.1.tgz", + "integrity": 
"sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==", + "dev": true + }, + "node_modules/@microsoft/api-documenter/node_modules/@rushstack/node-core-library": { + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-5.10.2.tgz", + "integrity": "sha512-xOF/2gVJZTfjTxbo4BDj9RtQq/HFnrrKdtem4JkyRLnwsRz2UDTg8gA1/et10fBx5RxmZD9bYVGST69W8ME5OQ==", + "dev": true, + "dependencies": { + "ajv": "~8.13.0", + "ajv-draft-04": "~1.0.0", + "ajv-formats": "~3.0.1", + "fs-extra": "~7.0.1", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.22.1", + "semver": "~7.5.4" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@microsoft/api-documenter/node_modules/@rushstack/ts-command-line": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@rushstack/ts-command-line/-/ts-command-line-4.23.3.tgz", + "integrity": "sha512-HazKL8fv4HMQMzrKJCrOrhyBPPdzk7iajUXgsASwjQ8ROo1cmgyqxt/k9+SdmrNLGE1zATgRqMUH3s/6smbRMA==", + "dev": true, + "dependencies": { + "@rushstack/terminal": "0.14.5", + "@types/argparse": "1.0.38", + "argparse": "~1.0.9", + "string-argv": "~0.3.1" + } + }, + "node_modules/@microsoft/api-documenter/node_modules/ajv": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.13.0.tgz", + "integrity": "sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.4.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/api-documenter/node_modules/js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@microsoft/api-documenter/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/api-documenter/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/api-documenter/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@microsoft/api-extractor": { + "version": "7.49.1", + "resolved": "https://registry.npmjs.org/@microsoft/api-extractor/-/api-extractor-7.49.1.tgz", + "integrity": 
"sha512-jRTR/XbQF2kb+dYn8hfYSicOGA99+Fo00GrsdMwdfE3eIgLtKdH6Qa2M3wZV9S2XmbgCaGX1OdPtYctbfu5jQg==", + "dev": true, + "dependencies": { + "@microsoft/api-extractor-model": "7.30.2", + "@microsoft/tsdoc": "~0.15.1", + "@microsoft/tsdoc-config": "~0.17.1", + "@rushstack/node-core-library": "5.10.2", + "@rushstack/rig-package": "0.5.3", + "@rushstack/terminal": "0.14.5", + "@rushstack/ts-command-line": "4.23.3", + "lodash": "~4.17.15", + "minimatch": "~3.0.3", + "resolve": "~1.22.1", + "semver": "~7.5.4", + "source-map": "~0.6.1", + "typescript": "5.7.2" + }, + "bin": { + "api-extractor": "bin/api-extractor" + } + }, + "node_modules/@microsoft/api-extractor-model": { + "version": "7.30.2", + "resolved": "https://registry.npmjs.org/@microsoft/api-extractor-model/-/api-extractor-model-7.30.2.tgz", + "integrity": "sha512-3/t2F+WhkJgBzSNwlkTIL0tBgUoBqDqL66pT+nh2mPbM0NIDGVGtpqbGWPgHIzn/mn7kGS/Ep8D8po58e8UUIw==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "~0.15.1", + "@microsoft/tsdoc-config": "~0.17.1", + "@rushstack/node-core-library": "5.10.2" + } + }, + "node_modules/@microsoft/api-extractor-model/node_modules/@microsoft/tsdoc": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.1.tgz", + "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==", + "dev": true + }, + "node_modules/@microsoft/api-extractor-model/node_modules/@rushstack/node-core-library": { + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-5.10.2.tgz", + "integrity": "sha512-xOF/2gVJZTfjTxbo4BDj9RtQq/HFnrrKdtem4JkyRLnwsRz2UDTg8gA1/et10fBx5RxmZD9bYVGST69W8ME5OQ==", + "dev": true, + "dependencies": { + "ajv": "~8.13.0", + "ajv-draft-04": "~1.0.0", + "ajv-formats": "~3.0.1", + "fs-extra": "~7.0.1", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.22.1", + "semver": "~7.5.4" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@microsoft/api-extractor-model/node_modules/ajv": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.13.0.tgz", + "integrity": "sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.4.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/api-extractor-model/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/api-extractor-model/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/api-extractor-model/node_modules/yallist": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@microsoft/api-extractor/node_modules/@microsoft/tsdoc": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.1.tgz", + "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==", + "dev": true + }, + "node_modules/@microsoft/api-extractor/node_modules/@rushstack/node-core-library": { + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-5.10.2.tgz", + "integrity": "sha512-xOF/2gVJZTfjTxbo4BDj9RtQq/HFnrrKdtem4JkyRLnwsRz2UDTg8gA1/et10fBx5RxmZD9bYVGST69W8ME5OQ==", + "dev": true, + "dependencies": { + "ajv": "~8.13.0", + "ajv-draft-04": "~1.0.0", + "ajv-formats": "~3.0.1", + "fs-extra": "~7.0.1", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.22.1", + "semver": "~7.5.4" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@microsoft/api-extractor/node_modules/@rushstack/ts-command-line": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@rushstack/ts-command-line/-/ts-command-line-4.23.3.tgz", + "integrity": "sha512-HazKL8fv4HMQMzrKJCrOrhyBPPdzk7iajUXgsASwjQ8ROo1cmgyqxt/k9+SdmrNLGE1zATgRqMUH3s/6smbRMA==", + "dev": true, + "dependencies": { + "@rushstack/terminal": "0.14.5", + "@types/argparse": "1.0.38", + "argparse": "~1.0.9", + "string-argv": "~0.3.1" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/ajv": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.13.0.tgz", + "integrity": "sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.4.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/typescript": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", + "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/@microsoft/api-extractor/node_modules/yallist": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@microsoft/tsdoc": { + "version": "0.12.24", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.12.24.tgz", + "integrity": "sha512-Mfmij13RUTmHEMi9vRUhMXD7rnGR2VvxeNYtaGtaJ4redwwjT4UXYJ+nzmVJF7hhd4pn/Fx5sncDKxMVFJSWPg==", + "dev": true + }, + "node_modules/@microsoft/tsdoc-config": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc-config/-/tsdoc-config-0.17.1.tgz", + "integrity": "sha512-UtjIFe0C6oYgTnad4q1QP4qXwLhe6tIpNTRStJ2RZEPIkqQPREAwE5spzVxsdn9UaEMUqhh0AqSx3X4nWAKXWw==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.15.1", + "ajv": "~8.12.0", + "jju": "~1.4.0", + "resolve": "~1.22.2" + } + }, + "node_modules/@microsoft/tsdoc-config/node_modules/@microsoft/tsdoc": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@microsoft/tsdoc/-/tsdoc-0.15.1.tgz", + "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==", + "dev": true + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.0.7.tgz", + "integrity": "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.5.0", + "@emnapi/runtime": "^1.5.0", + "@tybys/wasm-util": "^0.10.1" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@opentelemetry/api-logs": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.52.1.tgz", + "integrity": "sha512-qnSqB2DQ9TPP96dl8cDubDvrUyWc0/sK81xHTK8eSUspzDM3bsewX903qclQFvVhgStjRWdC5bLb3kQqMkfV5A==", + "dev": true, + "dependencies": { + "@opentelemetry/api": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/context-async-hooks": { + 
"version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.30.1.tgz", + "integrity": "sha512-s5vvxXPVdjqS3kTLKMeBMvop9hbWkwzBpu+mUO2M7sZtlkyDJGwFe33wRKnbaYDo8ExRVBIIdwIGrqpxHuKttA==", + "dev": true, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-grpc/-/exporter-trace-otlp-grpc-0.52.1.tgz", + "integrity": "sha512-pVkSH20crBwMTqB3nIN4jpQKUEoB0Z94drIHpYyEqs7UBr+I0cpYyOR3bqjA/UasQUMROb3GX8ZX4/9cVRqGBQ==", + "dev": true, + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "1.25.1", + "@opentelemetry/otlp-grpc-exporter-base": "0.52.1", + "@opentelemetry/otlp-transformer": "0.52.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": 
"sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-http/-/exporter-trace-otlp-http-0.52.1.tgz", + "integrity": "sha512-05HcNizx0BxcFKKnS5rwOV+2GevLTVIRA0tRgWYyw4yCgR53Ic/xk83toYKts7kbzcI+dswInUg/4s8oyA+tqg==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/otlp-exporter-base": "0.52.1", + "@opentelemetry/otlp-transformer": "0.52.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-proto/-/exporter-trace-otlp-proto-0.52.1.tgz", + "integrity": "sha512-pt6uX0noTQReHXNeEslQv7x311/F1gJzMnp1HD2qgypLRPbXDeMzzeTngRTUaUbP6hqWNtPxuLr4DEoZG+TcEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/otlp-exporter-base": "0.52.1", + "@opentelemetry/otlp-transformer": "0.52.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1" + }, + "engines": { + "node": ">=14" 
+ }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/exporter-zipkin": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-zipkin/-/exporter-zipkin-1.25.1.tgz", + "integrity": "sha512-RmOwSvkimg7ETwJbUOPTMhJm9A9bG1U8s7Zo3ajDh4zM7eYcycQ0dM7FbLD6NXWbI2yj7UY4q8BKinKYBQksyw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": 
"sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/instrumentation": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.52.1.tgz", + "integrity": "sha512-uXJbYU/5/MBHjMp1FqrILLRuiJCs3Ofk0MeRDk8g1S1gD47U8X3JnSwcMO1rtRo1x1a7zKaQHaoYu49p/4eSKw==", + "dev": true, + "dependencies": { + "@opentelemetry/api-logs": "0.52.1", + "@types/shimmer": "^1.0.2", + "import-in-the-middle": "^1.8.1", + "require-in-the-middle": "^7.1.1", + "semver": "^7.5.2", + "shimmer": "^1.2.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/otlp-exporter-base": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.52.1.tgz", + "integrity": "sha512-z175NXOtX5ihdlshtYBe5RpGeBoTXVCKPPLiQlD6FHvpM4Ch+p2B0yWKYSrBfLH24H9zjJiBdTrtD+hLlfnXEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/otlp-transformer": "0.52.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/otlp-exporter-base/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-exporter-base/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base": { + "version": "0.52.1", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.52.1.tgz", + "integrity": "sha512-zo/YrSDmKMjG+vPeA9aBBrsQM9Q/f2zo6N04WMB3yNldJRsgpRBeLLwvAt/Ba7dpehDLOEFBd1i2JCoaFtpCoQ==", + "dev": true, + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "1.25.1", + "@opentelemetry/otlp-exporter-base": "0.52.1", + "@opentelemetry/otlp-transformer": "0.52.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/otlp-transformer": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-transformer/-/otlp-transformer-0.52.1.tgz", + "integrity": "sha512-I88uCZSZZtVa0XniRqQWKbjAUm73I8tpEy/uJYPPYw5d7BRdVk0RfTBQw8kSUl01oVWEuqxLDa802222MYyWHg==", + "dev": true, + "dependencies": { + "@opentelemetry/api-logs": "0.52.1", + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/sdk-logs": "0.52.1", + "@opentelemetry/sdk-metrics": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1", + "protobufjs": "^7.3.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/sdk-metrics": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-1.25.1.tgz", + "integrity": "sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==", + 
"dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "lodash.merge": "^4.6.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/otlp-transformer/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/propagator-b3": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-b3/-/propagator-b3-1.25.1.tgz", + "integrity": "sha512-p6HFscpjrv7//kE+7L+3Vn00VEDUJB0n6ZrjkTYHrJ58QZ8B3ajSJhRbCcY6guQ3PDjTbxWklyvIN2ojVbIb1A==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-b3/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-b3/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/propagator-jaeger": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-jaeger/-/propagator-jaeger-1.25.1.tgz", + "integrity": "sha512-nBprRf0+jlgxks78G/xq72PipVK+4or9Ypntw0gVZYNTCSK8rg5SeaGV19tV920CMqBD/9UIOiFr23Li/Q8tiA==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": 
">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/resources": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.30.1.tgz", + "integrity": "sha512-5UxZqiAgLYGFjS4s9qm5mBVo433u+dSPUFWVWXmLAD4wB65oMCoXaJP1KJa9DIYYMeHu3z4BZcStG3LC593cWA==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.30.1", + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs": { + "version": "0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-logs/-/sdk-logs-0.52.1.tgz", + "integrity": "sha512-MBYh+WcPPsN8YpRHRmK1Hsca9pVlyyKd4BxOC4SsgHACnl/bPp4Cri9hWhVm5+2tiQ9Zf4qSc1Jshw9tOLGWQA==", + "dev": true, + "dependencies": { + "@opentelemetry/api-logs": "0.52.1", + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.4.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-logs/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-metrics": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-1.30.1.tgz", + "integrity": "sha512-q9zcZ0Okl8jRgmy7eNW3Ku1XSgg3sDLa5evHZpCwjspw7E8Is4K/haRPDJrBcX3YSn/Y7gUvFnByNYEKQNbNog==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.30.1", + "@opentelemetry/resources": "1.30.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node": { + "version": 
"0.52.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-node/-/sdk-node-0.52.1.tgz", + "integrity": "sha512-uEG+gtEr6eKd8CVWeKMhH2olcCHM9dEK68pe0qE0be32BcCRsvYURhHaD1Srngh1SQcnQzZ4TP324euxqtBOJA==", + "dev": true, + "dependencies": { + "@opentelemetry/api-logs": "0.52.1", + "@opentelemetry/core": "1.25.1", + "@opentelemetry/exporter-trace-otlp-grpc": "0.52.1", + "@opentelemetry/exporter-trace-otlp-http": "0.52.1", + "@opentelemetry/exporter-trace-otlp-proto": "0.52.1", + "@opentelemetry/exporter-zipkin": "1.25.1", + "@opentelemetry/instrumentation": "0.52.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/sdk-logs": "0.52.1", + "@opentelemetry/sdk-metrics": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1", + "@opentelemetry/sdk-trace-node": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-metrics": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-1.25.1.tgz", + "integrity": "sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "lodash.merge": "^4.6.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-node/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + 
"node": ">=14" + } + }, + "node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.30.1.tgz", + "integrity": "sha512-jVPgBbH1gCy2Lb7X0AVQ8XAfgg0pJ4nvl8/IiQA6nxOsPvS+0zMJaFSs2ltXe0J6C8dqjcnpyqINDJmU30+uOg==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.30.1", + "@opentelemetry/resources": "1.30.1", + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.25.1.tgz", + "integrity": "sha512-nMcjFIKxnFqoez4gUmihdBrbpsEnAX/Xj16sGvZm+guceYE0NE00vLhpDVK6f3q8Q4VFI5xG8JjlXKMB/SkTTQ==", + "dev": true, + "dependencies": { + "@opentelemetry/context-async-hooks": "1.25.1", + "@opentelemetry/core": "1.25.1", + "@opentelemetry/propagator-b3": "1.25.1", + "@opentelemetry/propagator-jaeger": "1.25.1", + "@opentelemetry/sdk-trace-base": "1.25.1", + "semver": "^7.5.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/context-async-hooks": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.25.1.tgz", + "integrity": "sha512-UW/ge9zjvAEmRWVapOP0qyCvPulWU6cQxGxDbWEFfGOj1VBBZAuOqTo3X6yWmDTD3Xe15ysCZChHncr2xFMIfQ==", + "dev": true, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/core": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.25.1.tgz", + "integrity": "sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==", + "dev": true, + "dependencies": { + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/resources": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.25.1.tgz", + "integrity": "sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.25.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.25.1.tgz", + "integrity": "sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==", + "dev": true, + "dependencies": { + "@opentelemetry/core": "1.25.1", + "@opentelemetry/resources": "1.25.1", + "@opentelemetry/semantic-conventions": "1.25.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.25.1", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.25.1.tgz", + "integrity": "sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@oxc-project/runtime": { + "version": "0.95.0", + "resolved": "https://registry.npmjs.org/@oxc-project/runtime/-/runtime-0.95.0.tgz", + "integrity": "sha512-qJS5pNepwMGnafO9ayKGz7rfPQgUBuunHpnP1//9Qa0zK3oT3t1EhT+I+pV9MUA+ZKez//OFqxCxf1vijCKb2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-project/types": { + "version": "0.95.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.95.0.tgz", + "integrity": "sha512-vACy7vhpMPhjEJhULNxrdR0D943TkA/MigMpJCHmBHvMXxRStRi/dPtTlfQ3uDwWSzRpT8z+7ImjZVf8JWBocQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "node_modules/@quansync/fs": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@quansync/fs/-/fs-0.1.5.tgz", + "integrity": "sha512-lNS9hL2aS2NZgNW7BBj+6EBl4rOf8l+tQ0eRY6JWCI8jI2kc53gSoqbjojU0OnAWhzoXiOjFyGsHcDGePB3lhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "quansync": "^0.2.11" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-beta.45.tgz", + "integrity": "sha512-bfgKYhFiXJALeA/riil908+2vlyWGdwa7Ju5S+JgWZYdR4jtiPOGdM6WLfso1dojCh+4ZWeiTwPeV9IKQEX+4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-beta.45.tgz", + "integrity": "sha512-xjCv4CRVsSnnIxTuyH1RDJl5OEQ1c9JYOwfDAHddjJDxCw46ZX9q80+xq7Eok7KC4bRSZudMJllkvOKv0T9SeA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-beta.45.tgz", + "integrity": "sha512-ddcO9TD3D/CLUa/l8GO8LHzBOaZqWg5ClMy3jICoxwCuoz47h9dtqPsIeTiB6yR501LQTeDsjA4lIFd7u3Ljfw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-beta.45.tgz", + "integrity": "sha512-MBTWdrzW9w+UMYDUvnEuh0pQvLENkl2Sis15fHTfHVW7ClbGuez+RWopZudIDEGkpZXdeI4CkRXk+vdIIebrmg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-beta.45.tgz", + "integrity": 
"sha512-4YgoCFiki1HR6oSg+GxxfzfnVCesQxLF1LEnw9uXS/MpBmuog0EOO2rYfy69rWP4tFZL9IWp6KEfGZLrZ7aUog==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-beta.45.tgz", + "integrity": "sha512-LE1gjAwQRrbCOorJJ7LFr10s5vqYf5a00V5Ea9wXcT2+56n5YosJkcp8eQ12FxRBv2YX8dsdQJb+ZTtYJwb6XQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-musl": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-beta.45.tgz", + "integrity": "sha512-tdy8ThO/fPp40B81v0YK3QC+KODOmzJzSUOO37DinQxzlTJ026gqUSOM8tzlVixRbQJltgVDCTYF8HNPRErQTA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-beta.45.tgz", + "integrity": "sha512-lS082ROBWdmOyVY/0YB3JmsiClaWoxvC+dA8/rbhyB9VLkvVEaihLEOr4CYmrMse151C4+S6hCw6oa1iewox7g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-beta.45.tgz", + "integrity": "sha512-Hi73aYY0cBkr1/SvNQqH8Cd+rSV6S9RB5izCv0ySBcRnd/Wfn5plguUoGYwBnhHgFbh6cPw9m2dUVBR6BG1gxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-beta.45.tgz", + "integrity": "sha512-fljEqbO7RHHogNDxYtTzr+GNjlfOx21RUyGmF+NrkebZ8emYYiIqzPxsaMZuRx0rgZmVmliOzEp86/CQFDKhJQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-beta.45.tgz", + "integrity": "sha512-ZJDB7lkuZE9XUnWQSYrBObZxczut+8FZ5pdanm8nNS1DAo8zsrPuvGwn+U3fwU98WaiFsNrA4XHngesCGr8tEQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.0.7" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-beta.45.tgz", + "integrity": "sha512-zyzAjItHPUmxg6Z8SyRhLdXlJn3/D9KL5b9mObUrBHhWS/GwRH4665xCiFqeuktAhhWutqfc+rOV2LjK4VYQGQ==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-win32-ia32-msvc": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.0.0-beta.45.tgz", + "integrity": "sha512-wODcGzlfxqS6D7BR0srkJk3drPwXYLu7jPHN27ce2c4PUnVVmJnp9mJzUQGT4LpmHmmVdMZ+P6hKvyTGBzc1CA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-beta.45.tgz", + "integrity": "sha512-wiU40G1nQo9rtfvF9jLbl79lUgjfaD/LTyUEw2Wg/gdF5OhjzpKMVugZQngO+RNdwYaNj+Fs+kWBWfp4VXPMHA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.45.tgz", + "integrity": "sha512-Le9ulGCrD8ggInzWw/k2J8QcbPz7eGIOWqfJ2L+1R0Opm7n6J37s2hiDWlh6LJN0Lk9L5sUzMvRHKW7UxBZsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rushstack/node-core-library": { + "version": "3.45.5", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-3.45.5.tgz", + "integrity": "sha512-KbN7Hp9vH3bD3YJfv6RnVtzzTAwGYIBl7y2HQLY4WEQqRbvE3LgI78W9l9X+cTAXCX//p0EeoiUYNTFdqJrMZg==", + "dev": true, + "dependencies": { + "@types/node": "12.20.24", + "colors": "~1.2.1", + "fs-extra": "~7.0.1", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.17.0", + "semver": "~7.3.0", + "timsort": "~0.3.0", + "z-schema": "~5.0.2" + } + }, + "node_modules/@rushstack/node-core-library/node_modules/@types/node": { + "version": "12.20.24", + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.24.tgz", + "integrity": "sha512-yxDeaQIAJlMav7fH5AQqPH1u8YIuhYJXYBzxaQ4PifsU0GDO38MSdmEDeRlIxrKbC6NbEaaEHDanWb+y30U8SQ==", + "dev": true + }, + "node_modules/@rushstack/node-core-library/node_modules/colors": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.2.5.tgz", + "integrity": "sha512-erNRLao/Y3Fv54qUa0LBB+//Uf3YwMUmdJinN20yMXm9zdKKqH9wt7R9IIVZ+K7ShzfpLV/Zg8+VyrBJYB4lpg==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@rushstack/node-core-library/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@rushstack/node-core-library/node_modules/resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "dependencies": { + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/@rushstack/node-core-library/node_modules/semver": { + "version": "7.3.8", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@rushstack/node-core-library/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@rushstack/rig-package": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/@rushstack/rig-package/-/rig-package-0.5.3.tgz", + "integrity": "sha512-olzSSjYrvCNxUFZowevC3uz8gvKr3WTpHQ7BkpjtRpA3wK+T0ybep/SRUMfr195gBzJm5gaXw0ZMgjIyHqJUow==", + "dev": true, + "dependencies": { + "resolve": "~1.22.1", + "strip-json-comments": "~3.1.1" + } + }, + "node_modules/@rushstack/terminal": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/@rushstack/terminal/-/terminal-0.14.5.tgz", + "integrity": "sha512-TEOpNwwmsZVrkp0omnuTUTGZRJKTr6n6m4OITiNjkqzLAkcazVpwR1SOtBg6uzpkIBLgrcNHETqI8rbw3uiUfw==", + "dev": true, + "dependencies": { + "@rushstack/node-core-library": "5.10.2", + "supports-color": "~8.1.1" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@rushstack/terminal/node_modules/@rushstack/node-core-library": { + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-5.10.2.tgz", + "integrity": "sha512-xOF/2gVJZTfjTxbo4BDj9RtQq/HFnrrKdtem4JkyRLnwsRz2UDTg8gA1/et10fBx5RxmZD9bYVGST69W8ME5OQ==", + "dev": true, + "dependencies": { + "ajv": "~8.13.0", + "ajv-draft-04": "~1.0.0", + "ajv-formats": "~3.0.1", + "fs-extra": "~7.0.1", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.22.1", + "semver": "~7.5.4" + }, + "peerDependencies": { + "@types/node": "*" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@rushstack/terminal/node_modules/ajv": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.13.0.tgz", + "integrity": "sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.4.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@rushstack/terminal/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@rushstack/terminal/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/@rushstack/terminal/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@rushstack/ts-command-line": { + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@rushstack/ts-command-line/-/ts-command-line-4.11.0.tgz", + "integrity": "sha512-ptG9L0mjvJ5QtK11GsAFY+jGfsnqHDS6CY6Yw1xT7a9bhjfNYnf6UPwjV+pF6UgiucfNcMDNW9lkDLxvZKKxMg==", + "dev": true, + "dependencies": { + "@types/argparse": "1.0.38", + "argparse": "~1.0.9", + "colors": "~1.2.1", + "string-argv": "~0.3.1" + } + }, + "node_modules/@rushstack/ts-command-line/node_modules/colors": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.2.5.tgz", + "integrity": "sha512-erNRLao/Y3Fv54qUa0LBB+//Uf3YwMUmdJinN20yMXm9zdKKqH9wt7R9IIVZ+K7ShzfpLV/Zg8+VyrBJYB4lpg==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@sindresorhus/base62": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/base62/-/base62-1.0.0.tgz", + "integrity": "sha512-TeheYy0ILzBEI/CO55CP6zJCSdSWeRtGnHy8U8dWSUH4I68iqTsy7HkMktR4xakThc9jotkPQUXT4ITdbV7cHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sinonjs/commons": { + "version": "1.8.6", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.6.tgz", + "integrity": "sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/commons/node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz", + "integrity": "sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0" + } + }, + "node_modules/@sinonjs/samsam": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-5.3.1.tgz", + "integrity": "sha512-1Hc0b1TtyfBu8ixF/tpfSHTVWKwCBLY4QJbkgnE7HcwyvT2xArDxb4K7dMgqRm3szI+LJbzmW/s4xxEhv6hwDg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.6.0", + "lodash.get": "^4.4.2", + "type-detect": "^4.0.8" + } + }, + "node_modules/@sinonjs/text-encoding": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz", + "integrity": "sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==", + "dev": true + }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": 
"https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/argparse": { + "version": "1.0.38", + "resolved": "https://registry.npmjs.org/@types/argparse/-/argparse-1.0.38.tgz", + "integrity": "sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA==", + "dev": true + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==", + "dev": true, + "optional": true + }, + "node_modules/@types/chai": { + "version": "4.3.20", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.20.tgz", + "integrity": "sha512-/pC9HAB5I/xMlc5FP77qjCnI16ChlJfW0tGa0IUcFn38VJrTV6DeZ60NU5KZBtaOZqjdpwTWohz5HU1RrhiYxQ==", + "dev": true + }, + "node_modules/@types/chai-as-promised": { + "version": "7.1.8", + "resolved": "https://registry.npmjs.org/@types/chai-as-promised/-/chai-as-promised-7.1.8.tgz", + "integrity": "sha512-ThlRVIJhr69FLlh6IctTXFkmhtP3NpMZ2QGq69StYLyKZFp/HOp1VdKZj7RvfNWYYcJ1xlbLGLLWj1UvP5u/Gw==", + "dev": true, + "dependencies": { + "@types/chai": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/cors": { + "version": "2.8.17", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.17.tgz", + "integrity": "sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + 
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", + "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.7.tgz", + "integrity": "sha512-ugo316mmTYBl2g81zDFnZ7cfxlut3o+/EQdaP7J8QN2kY6lJ22hmQYCK5EHcJHbrW+dkCGSCPgbG8JtYj6qSrg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==", + "dev": true + }, + "node_modules/@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", + "dev": true, + "optional": true + }, + "node_modules/@types/markdown-it": { + "version": "14.1.2", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz", + "integrity": "sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==", + "dev": true, + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } + }, + "node_modules/@types/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==", + "dev": true + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==" + }, + "node_modules/@types/mocha": { + "version": "5.2.7", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.7.tgz", + "integrity": 
"sha512-NYrtPht0wGzhwe9+/idPaBB+TqkY9AhTvOLMkThm0IoEfLaiVQZwBwyJ5puCkO3AUCWrmcoePjp2mbFocKy4SQ==", + "dev": true + }, + "node_modules/@types/mock-require": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/mock-require/-/mock-require-2.0.3.tgz", + "integrity": "sha512-0Hd1krmO7Dwa8haImu+eZXZ6FeCtixS8S1xvM6LWNJE5DFV5A92/zpAkQCDPOA/Z13d1xY3LqS7hpSWqlDzxrQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/nock": { + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/@types/nock/-/nock-10.0.3.tgz", + "integrity": "sha512-OthuN+2FuzfZO3yONJ/QVjKmLEuRagS9TV9lEId+WHL9KhftYG+/2z+pxlr0UgVVXSpVD8woie/3fzQn8ft/Ow==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/node-fetch": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-3.0.3.tgz", + "integrity": "sha512-HhggYPH5N+AQe/OmN6fmhKmRRt2XuNJow+R3pQwJxOOF9GuwM7O2mheyGeIrs5MOIeNjDEdgdoyHBOrFeJBR3g==", + "deprecated": "This is a stub types definition. node-fetch provides its own type definitions, so you do not need this installed.", + "dev": true, + "dependencies": { + "node-fetch": "*" + } + }, + "node_modules/@types/node/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "license": "MIT" + }, + "node_modules/@types/qs": { + "version": "6.9.18", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz", + "integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==" + }, + "node_modules/@types/request": { + "version": "2.48.12", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", + "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", + "dev": true, + "optional": true, + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "node_modules/@types/request/node_modules/form-data": { + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz", + "integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": 
"sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/shimmer": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@types/shimmer/-/shimmer-1.2.0.tgz", + "integrity": "sha512-UE7oxhQLLd9gub6JKIAhDq06T0F6FnztwMNRvYgjeQSBeMc1ZG/tA47EwfduvkuQS8apbkM/lpLpWsaCeYsXVg==", + "dev": true + }, + "node_modules/@types/sinon": { + "version": "9.0.11", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-9.0.11.tgz", + "integrity": "sha512-PwP4UY33SeeVKodNE37ZlOsR9cReypbMJOhZ7BVE0lB+Hix3efCOxiJWiE5Ia+yL9Cn2Ch72EjFTRze8RZsNtg==", + "dev": true, + "dependencies": { + "@types/sinonjs__fake-timers": "*" + } + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.5", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.5.tgz", + "integrity": "sha512-mQkU2jY8jJEF7YHjHvsQO8+3ughTL1mcnn96igfhONmR+fUPSKIkefQYpSe8bsly2Ep7oQbn/6VG5/9/0qcArQ==", + "dev": true + }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "dev": true, + "optional": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.2.tgz", + "integrity": "sha512-ZGBMToy857/NIPaaCucIUQgqueOiq7HeAKkhlvqVV4lm089zUFW6ikRySx2v+cAhKeUCPuWVHeimyk6Dw1iY3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.46.2", + "@typescript-eslint/type-utils": "8.46.2", + "@typescript-eslint/utils": "8.46.2", + "@typescript-eslint/visitor-keys": "8.46.2", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.46.2", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.2.tgz", + "integrity": "sha512-BnOroVl1SgrPLywqxyqdJ4l3S2MsKVLDVxZvjI1Eoe8ev2r3kGDo+PcMihNmDE+6/KjkTubSJnmqGZZjQSBq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.46.2", + "@typescript-eslint/types": "8.46.2", + "@typescript-eslint/typescript-estree": 
"8.46.2", + "@typescript-eslint/visitor-keys": "8.46.2", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.2.tgz", + "integrity": "sha512-PULOLZ9iqwI7hXcmL4fVfIsBi6AN9YxRc0frbvmg8f+4hQAjQ5GYNKK0DIArNo+rOKmR/iBYwkpBmnIwin4wBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.46.2", + "@typescript-eslint/types": "^8.46.2", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.2.tgz", + "integrity": "sha512-LF4b/NmGvdWEHD2H4MsHD8ny6JpiVNDzrSZr3CsckEgCbAGZbYM4Cqxvi9L+WqDMT+51Ozy7lt2M+d0JLEuBqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.46.2", + "@typescript-eslint/visitor-keys": "8.46.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.2.tgz", + "integrity": "sha512-a7QH6fw4S57+F5y2FIxxSDyi5M4UfGF+Jl1bCGd7+L4KsaUY80GsiF/t0UoRFDHAguKlBaACWJRmdrc6Xfkkag==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.2.tgz", + "integrity": "sha512-HbPM4LbaAAt/DjxXaG9yiS9brOOz6fabal4uvUmaUYe6l3K1phQDMQKBRUrr06BQkxkvIZVVHttqiybM9nJsLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.46.2", + "@typescript-eslint/typescript-estree": "8.46.2", + "@typescript-eslint/utils": "8.46.2", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.2.tgz", + "integrity": "sha512-lNCWCbq7rpg7qDsQrd3D6NyWYu+gkTENkG5IKYhUIcxSb59SQC/hEQ+MrG4sTgBVghTonNWq42bA/d4yYumldQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + 
"node_modules/@typescript-eslint/typescript-estree": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.2.tgz", + "integrity": "sha512-f7rW7LJ2b7Uh2EiQ+7sza6RDZnajbNbemn54Ob6fRwQbgcIn+GWfyuHDHRYgRoZu1P4AayVScrRW+YfbTvPQoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.46.2", + "@typescript-eslint/tsconfig-utils": "8.46.2", + "@typescript-eslint/types": "8.46.2", + "@typescript-eslint/visitor-keys": "8.46.2", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.2.tgz", + "integrity": "sha512-sExxzucx0Tud5tE0XqR0lT0psBQvEpnpiul9XbGUB1QwpWJJAps1O/Z7hJxLGiZLBKMCutjTzDgmd1muEhBnVg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.46.2", + "@typescript-eslint/types": "8.46.2", + "@typescript-eslint/typescript-estree": "8.46.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.46.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.2.tgz", + "integrity": "sha512-tUFMXI4gxzzMXt4xpGJEsBsTox0XbNQ1y94EwlD/CuZwFcQP79xfQqMhau9HsRc/J0cAPA/HZt1dZPtGn9V/7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.46.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": 
true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/abab": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", + "deprecated": "Use your platform's native atob() and btoa() methods instead", + "dev": true + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dev": true, + "optional": true, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-globals": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", + "dev": true, + "dependencies": { + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1" + } + }, + "node_modules/acorn-globals/node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "dev": true, + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "dev": true, + "engines": { + 
"node": ">= 14" + } + }, + "node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-draft-04": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz", + "integrity": "sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==", + "dev": true, + "peerDependencies": { + "ajv": "^8.5.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "dev": true, + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/ansis": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/ansis/-/ansis-4.2.0.tgz", + "integrity": "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/api-extractor-model-me": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/api-extractor-model-me/-/api-extractor-model-me-0.1.1.tgz", + "integrity": 
"sha512-Ez801ZMADfkseOWNRFquvyQYDm3D9McpxfkKMWL6JFCGcpub0miJ+TFNphIR1nSZbrsxz3kIeOovNMY4VlL6Bw==", + "dev": true, + "dependencies": { + "@microsoft/tsdoc": "0.12.24", + "@rushstack/node-core-library": "3.36.0" + } + }, + "node_modules/api-extractor-model-me/node_modules/@rushstack/node-core-library": { + "version": "3.36.0", + "resolved": "https://registry.npmjs.org/@rushstack/node-core-library/-/node-core-library-3.36.0.tgz", + "integrity": "sha512-bID2vzXpg8zweXdXgQkKToEdZwVrVCN9vE9viTRk58gqzYaTlz4fMId6V3ZfpXN6H0d319uGi2KDlm+lUEeqCg==", + "dev": true, + "dependencies": { + "@types/node": "10.17.13", + "colors": "~1.2.1", + "fs-extra": "~7.0.1", + "import-lazy": "~4.0.0", + "jju": "~1.4.0", + "resolve": "~1.17.0", + "semver": "~7.3.0", + "timsort": "~0.3.0", + "z-schema": "~3.18.3" + } + }, + "node_modules/api-extractor-model-me/node_modules/@types/node": { + "version": "10.17.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.13.tgz", + "integrity": "sha512-pMCcqU2zT4TjqYFrWtYHKal7Sl30Ims6ulZ4UFXxI4xbtQqK/qqKwkDoBFCfooRqqmRu9vY3xaJRwxSh673aYg==", + "dev": true + }, + "node_modules/api-extractor-model-me/node_modules/colors": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.2.5.tgz", + "integrity": "sha512-erNRLao/Y3Fv54qUa0LBB+//Uf3YwMUmdJinN20yMXm9zdKKqH9wt7R9IIVZ+K7ShzfpLV/Zg8+VyrBJYB4lpg==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/api-extractor-model-me/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "optional": true + }, + "node_modules/api-extractor-model-me/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/api-extractor-model-me/node_modules/resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "dependencies": { + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/api-extractor-model-me/node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/api-extractor-model-me/node_modules/validator": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-8.2.0.tgz", + "integrity": "sha512-Yw5wW34fSv5spzTXNkokD6S6/Oq92d8q/t14TqsS3fAiA1RYnxSFSIZ+CY3n6PGGRCq5HhJTSepQvFUS2QUDxA==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/api-extractor-model-me/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/api-extractor-model-me/node_modules/z-schema": { + "version": "3.18.4", + "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-3.18.4.tgz", + "integrity": "sha512-DUOKC/IhbkdLKKiV89gw9DUauTV8U/8yJl1sjf6MtDmzevLKOF2duNJ495S3MFVjqZarr+qNGCPbkg4mu4PpLw==", + "dev": true, + "dependencies": { + "lodash.get": "^4.0.0", + "lodash.isequal": "^4.0.0", + "validator": "^8.0.0" + }, + "bin": { + "z-schema": "bin/z-schema" + }, + "optionalDependencies": { + "commander": "^2.7.1" + } + }, + "node_modules/are-docs-informative": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/are-docs-informative/-/are-docs-informative-0.0.2.tgz", + "integrity": "sha512-ixiS0nLNNG5jNQzgZJNoUpBKdo9yTYZMGJ+QgT2jmjR7G7+QHRCc4v6LQ3NgE7EBJq+o0ams3waJwkrlBom8Ig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true, + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/asn1.js": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", + "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", + "dev": true, + "dependencies": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/ast-kit": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ast-kit/-/ast-kit-2.1.3.tgz", + "integrity": "sha512-TH+b3Lv6pUjy/Nu0m6A2JULtdzLpmqF9x1Dhj00ZoEiML8qvVA9j1flkzTKNYgdEhWrjDwtWNpyyCUbfQe514g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.4", + "pathe": "^2.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/async": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", + "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", + "dev": true, + "dependencies": { + "lodash": "^4.17.14" + } + 
}, + "node_modules/async-mutex": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.5.0.tgz", + "integrity": "sha512-1A94B18jkJ3DYq284ohPxoXbfTA5HsQ7/Mf4DEhcyLx3Bz27Rh59iScbB6EPiP+B+joue6YCxcMXSbFC1tZKwA==", + "dev": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, + "optional": true, + "dependencies": { + "retry": "0.13.1" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/birpc": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.6.1.tgz", + "integrity": "sha512-LPnFhlDpdSH6FJhJyn4M0kFO7vtQ5iPw24FnG0y21q09xC7e8+1LeR31S1MAIrDAHp4m7aas4bEkTDTvMAtebQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "node_modules/bn.js": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.1.tgz", + "integrity": "sha512-k8TVBiPkPJT9uHLdOKfFpqcfprwBFOAAXXozRubr7R7PfIuKvQlzcI4M0pALeqXN09vdaMbUdUj+pass+uULAg==", + "dev": true + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "dependencies": { + "bytes": "3.1.2", + 
"content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==", + "dev": true + }, + "node_modules/browser-process-hrtime": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "dev": true + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": 
"sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/catharsis": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz", + "integrity": "sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==", + "dev": true, + "dependencies": { + "lodash": "^4.17.15" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/chai": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", + "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chai-as-promised": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.2.tgz", + "integrity": "sha512-aBDHZxRzYnUYuIAIPBH2s511DjlKPzXNlXSGFC8CwmroWQLfrW0LtE1nK3MAwwNhJPa9raEjNCmRoFpG0Hurdw==", + "dev": true, + "dependencies": { + "check-error": "^1.0.2" + }, + "peerDependencies": { + "chai": ">= 2.1.2 < 6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + 
"integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/child-process-promise": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/child-process-promise/-/child-process-promise-2.2.1.tgz", + "integrity": "sha512-Fi4aNdqBsr0mv+jgWxcZ/7rAIC2mgihrptyVI4foh/rrjY/3BNjfP9+oaiFx/fzim+1ZyCNBae0DlyfQhSugog==", + "dev": true, + "dependencies": { + "cross-spawn": "^4.0.2", + "node-version": "^1.0.0", + "promise-polyfill": "^6.0.1" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz", + "integrity": "sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA==", + "dev": true + }, + "node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true + }, + "node_modules/colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", + "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", + "dev": true, + "optional": true, + "engines": { + "node": "^12.20.0 || >=14" + } + }, + "node_modules/comment-parser": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.4.1.tgz", + "integrity": "sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", + "integrity": "sha512-yAXz/pA1tD8Gtg2S98Ekf/sewp3Lcp3YoFKJ4Hkp5h5yLWnKVTDU0kwjKJ8NDCYcfTLfyGkzTikst+jWypT1iA==", + "dev": true, + "dependencies": { + "lru-cache": "^4.0.1", + "which": "^1.2.9" + } + }, + "node_modules/cssom": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz", + "integrity": 
"sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==", + "dev": true + }, + "node_modules/cssstyle": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", + "dev": true, + "dependencies": { + "cssom": "~0.3.6" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cssstyle/node_modules/cssom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", + "dev": true + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/data-urls": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", + "integrity": "sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==", + "dev": true, + "dependencies": { + "abab": "^2.0.3", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decimal.js": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", + "dev": true + }, + "node_modules/deep-eql": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "dev": true, + "license": "MIT" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/domexception": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", + "integrity": "sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==", + "deprecated": "Use your platform's native DOMException instead", + "dev": true, + "dependencies": { + "webidl-conversions": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/domexception/node_modules/webidl-conversions": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", + "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/dts-resolver": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/dts-resolver/-/dts-resolver-2.1.2.tgz", + "integrity": "sha512-xeXHBQkn2ISSXxbJWD828PFjtyg+/UrMDo7W4Ffcs7+YWCquxU8YjV1KoxuiL+eJ5pg3ll+bC6flVv61L3LKZg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.18.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "oxc-resolver": ">=11.0.0" + }, + "peerDependenciesMeta": { + "oxc-resolver": { + "optional": true + } + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/duplexify": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "dev": true, + "optional": true, + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ee-first": { + 
"version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/elliptic": { + "version": "6.6.1", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.6.1.tgz", + "integrity": "sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g==", + "dev": true, + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/empathic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/empathic/-/empathic-2.0.0.tgz", + "integrity": "sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "optional": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/escodegen": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "dev": true, + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/eslint": { + "version": "9.38.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.38.0.tgz", + "integrity": "sha512-t5aPOpmtJcZcz5UJyY2GbvpDlsK5E8JqRqoKtfiKE3cNh437KIqfJr3A3AKf5k64NPx6d0G3dno6XDY05PqPtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.1", + "@eslint/core": "^0.16.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.38.0", + "@eslint/plugin-kit": "^0.4.0", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-config-google": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/eslint-config-google/-/eslint-config-google-0.14.0.tgz", + "integrity": "sha512-WsbX4WbjuMvTdeVL6+J3rK1RGhCTqjsFjX7UMSMgZiyxxaNLkoJENbrGExzERFeoTpGw3F3FypTiWAP9ZXzkEw==", + "dev": true, + 
"license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + }, + "peerDependencies": { + "eslint": ">=5.16.0" + } + }, + "node_modules/eslint-config-prettier": { + "version": "10.1.8", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.8.tgz", + "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "funding": { + "url": "https://opencollective.com/eslint-config-prettier" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-jsdoc": { + "version": "61.1.9", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-61.1.9.tgz", + "integrity": "sha512-X2AzSGbq1CzBRgKcVAu2qzOV9ogqygkUDk5AX6eNK5G+kY3I5Op5E5b99fE+FN0/bGnk2KGcsMIG6ZLF+di69A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@es-joy/jsdoccomment": "~0.76.0", + "@es-joy/resolve.exports": "1.0.0", + "are-docs-informative": "^0.0.2", + "comment-parser": "1.4.1", + "debug": "^4.4.3", + "escape-string-regexp": "^4.0.0", + "espree": "^10.4.0", + "esquery": "^1.6.0", + "html-entities": "^2.6.0", + "object-deep-merge": "^2.0.0", + "parse-imports-exports": "^0.2.4", + "semver": "^7.7.3", + "spdx-expression-parse": "^4.0.0", + "to-valid-identifier": "^1.0.0" + }, + "engines": { + "node": ">=20.11.0" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" + } + }, + "node_modules/eslint-plugin-jsdoc/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-plugin-jsdoc/node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", + "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prettier-linter-helpers": "^1.0.0" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "eslint": ">=7.28.0", + "prettier": ">=2.0.0" + }, + "peerDependenciesMeta": { + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || 
^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/eslint/node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/eslint/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, 
+ "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "dev": true, + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": 
"~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "node_modules/farmhash-modern": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/farmhash-modern/-/farmhash-modern-1.1.0.tgz", + "integrity": "sha512-6ypT4XfgqJk/F3Yuv4SX26I3doUjt0GTG4a+JgWxXQpxXzTBq8fPUeGHfcYMMDPHJHm3yPOSjaeBwBGAHWXCdA==", + "dev": true, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-diff": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", + "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": 
"sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fast-xml-parser": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.1.tgz", + "integrity": "sha512-y655CeyUQ+jj7KBbYMc4FG01V8ZQqjN+gDYGJ50RtfsUB8iG9AmwmwoAgeKLJdmueKKMrH1RJ7yXHTSoczdv5w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "optional": true, + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/faye-websocket": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", + "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", + "dev": true, + "dependencies": { + "websocket-driver": ">=0.5.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + 
"node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/firebase-admin": { + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-13.0.2.tgz", + "integrity": "sha512-YWVpoN+tZVSRXF0qC0gojoF5bSqvBRbnBk8+xUtFiguM2L4vB7f0moAwV1VVWDDHvTnvQ68OyTMpdp6wKo/clw==", + "dev": true, + "dependencies": { + "@fastify/busboy": "^3.0.0", + "@firebase/database-compat": "^2.0.0", + "@firebase/database-types": "^1.0.6", + "@types/node": "^22.8.7", + "farmhash-modern": "^1.1.0", + "google-auth-library": "^9.14.2", + "jsonwebtoken": "^9.0.0", + "jwks-rsa": "^3.1.0", + "node-forge": "^1.3.1", + "uuid": "^11.0.2" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@google-cloud/firestore": "^7.11.0", + "@google-cloud/storage": "^7.14.0" + } + }, + "node_modules/firebase-admin/node_modules/@types/node": { + "version": "22.10.7", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.7.tgz", + "integrity": "sha512-V09KvXxFiutGp6B7XkpaDXlNadZxrzajcY50EuoLIpQ6WWYCSvf19lVIazzfIzQvhUN2HjX12spLojTnhuKlGg==", + "dev": true, + "dependencies": { + "undici-types": "~6.20.0" + } + }, + "node_modules/firebase-admin/node_modules/undici-types": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "dev": true + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/form-data": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.4.tgz", + "integrity": "sha512-f0cRzm6dkyVYV3nPoooP8XlccPQukegwhAnpoLcXy+X+A8KfpGOoXwDr9FLZd3wzgLaBGQBE3lY93Zm/i1JvIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.35" + }, + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dev": true, + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/front-matter": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/front-matter/-/front-matter-4.0.2.tgz", + "integrity": "sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==", + "dev": true, + "dependencies": { + "js-yaml": "^3.13.1" + } + }, + "node_modules/fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "dev": true, + "optional": true + }, + "node_modules/gaxios": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", + "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", + "dev": true, + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gaxios/node_modules/uuid": { + "version": "9.0.1", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "dev": true, + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/genkit": { + "version": "1.0.0-rc.4", + "resolved": "https://registry.npmjs.org/genkit/-/genkit-1.0.0-rc.4.tgz", + "integrity": "sha512-J82s0L+uKt90vxp/SYkBeAfjXfXHh9epPgxhZ5wumnIeJNsBtGfpQ9FCWJ8zIQgOZmqQvQv8Itcrzj4E3RTvIQ==", + "dev": true, + "dependencies": { + "@genkit-ai/ai": "1.0.0-rc.4", + "@genkit-ai/core": "1.0.0-rc.4", + "@genkit-ai/dotprompt": "1.0.0-rc.4", + "uuid": "^10.0.0" + } + }, + "node_modules/genkit/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "dev": true + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", + "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "get-proto": "^1.0.0", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-port": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-5.1.1.tgz", + "integrity": "sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": 
"^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz", + "integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/google-auth-library": { + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", + "dev": true, + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", + "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", + "dev": true, + "optional": true, + "dependencies": { + "@grpc/grpc-js": "^1.10.9", + "@grpc/proto-loader": "^0.7.13", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^4.0.0", + "google-auth-library": "^9.3.0", + 
"node-fetch": "^2.7.0", + "object-hash": "^3.0.0", + "proto3-json-serializer": "^2.0.2", + "protobufjs": "^7.3.2", + "retry-request": "^7.0.0", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "dev": true, + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", + "dev": true, + "dependencies": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/hookable": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/html-encoding-sniffer": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", + "integrity": "sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==", + "dev": true, + "dependencies": { + "whatwg-encoding": "^1.0.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/html-entities": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.6.0.tgz", + "integrity": "sha512-kig+rMn/QOVRvr7c86gQ8lWXq+Hkv6CbAH1hLu+RG338StTpE8Z0b44SDVaqVu7HGKf27frdmUYEs9hTUX/cLQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ], + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-parser-js": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.9.tgz", + "integrity": "sha512-n1XsPy3rXVxlqxVioEWdC+0+M+SQw0DpJynwtOPo1X+ZlvdzTLtDBIJJlDQTnwZIFJrZSzSGmIOUdP8tu+SgLw==", + "dev": true + }, + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": 
"1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/http-proxy-agent/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-in-the-middle": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.12.0.tgz", + "integrity": "sha512-yAgSE7GmtRcu4ZUSFX/4v69UGXwugFFSdIQJ14LHPOPPQrWv8Y7O9PHsw8Ovk7bKCLe4sjXMbZFqGFcLHpZ89w==", + "dev": true, + "dependencies": { + "acorn": "^8.8.2", + "acorn-import-attributes": "^1.9.5", + "cjs-module-lexer": "^1.2.2", + "module-details-from-path": "^1.0.3" + } + }, + "node_modules/import-lazy": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-4.0.0.tgz", + "integrity": "sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true + }, + "node_modules/is-stream": { + "version": "2.0.1", + 
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/jju": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jju/-/jju-1.4.0.tgz", + "integrity": "sha512-8wb9Yw966OSxApiCt0K3yNJL8pnNeIv+OEq2YMidz4FKP6nonSRoOXc80iXY4JaN2FC11B9qsNmDsm+ZOfMROA==", + "dev": true + }, + "node_modules/jose": { + "version": "4.15.9", + "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.9.tgz", + "integrity": "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/js2xmlparser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz", + "integrity": "sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==", + "dev": true, + "dependencies": { + "xmlcreate": "^2.0.4" + } + }, + "node_modules/jsdoc": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-4.0.4.tgz", + "integrity": "sha512-zeFezwyXeG4syyYHbvh1A967IAqq/67yXtXvuL5wnqCkFZe8I0vKfm+EO+YEvLguo6w9CDUbrAXVtJSHh2E8rw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.20.15", + "@jsdoc/salty": "^0.2.1", + "@types/markdown-it": "^14.1.1", + "bluebird": "^3.7.2", + "catharsis": "^0.9.0", + "escape-string-regexp": "^2.0.0", + "js2xmlparser": "^4.0.2", + "klaw": "^3.0.0", + "markdown-it": "^14.1.0", + "markdown-it-anchor": "^8.6.7", + "marked": "^4.0.10", + "mkdirp": "^1.0.4", + "requizzle": "^0.2.3", + "strip-json-comments": "^3.1.0", + "underscore": "~1.13.2" + }, + "bin": { + "jsdoc": "jsdoc.js" + }, + 
"engines": { + "node": ">=12.0.0" + } + }, + "node_modules/jsdoc-type-pratt-parser": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-6.10.0.tgz", + "integrity": "sha512-+LexoTRyYui5iOhJGn13N9ZazL23nAHGkXsa1p/C8yeq79WRfLBag6ZZ0FQG2aRoc9yfo59JT9EYCQonOkHKkQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/jsdoc/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jsdoc/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jsdom": { + "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", + "dev": true, + "dependencies": { + "abab": "^2.0.5", + "acorn": "^8.2.4", + "acorn-globals": "^6.0.0", + "cssom": "^0.4.4", + "cssstyle": "^2.3.0", + "data-urls": "^2.0.0", + "decimal.js": "^10.2.1", + "domexception": "^2.0.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", + "html-encoding-sniffer": "^2.0.1", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.0.0", + "w3c-hr-time": "^1.0.2", + "w3c-xmlserializer": "^2.0.0", + "webidl-conversions": "^6.1.0", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.5.0", + "ws": "^7.4.6", + "xml-name-validator": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "canvas": "^2.5.0" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, + "node_modules/jsdom/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/jsdom/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": 
"sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dev": true, + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dev": true, + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dev": true, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": 
"sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dev": true, + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/just-extend": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz", + "integrity": "sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==", + "dev": true + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dev": true, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwk-to-pem": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/jwk-to-pem/-/jwk-to-pem-2.0.7.tgz", + "integrity": "sha512-cSVphrmWr6reVchuKQZdfSs4U9c5Y4hwZggPoz6cbVnTpAVgGRpEuQng86IyqLeGZlhTh+c4MAreB6KbdQDKHQ==", + "dev": true, + "dependencies": { + "asn1.js": "^5.3.0", + "elliptic": "^6.6.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwks-rsa": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-3.1.0.tgz", + "integrity": "sha512-v7nqlfezb9YfHHzYII3ef2a2j1XnGeSE/bK3WfumaYCqONAIstJbrEGapz4kadScZzEt7zYCN7bucj8C0Mv/Rg==", + "dev": true, + "dependencies": { + "@types/express": "^4.17.17", + "@types/jsonwebtoken": "^9.0.2", + "debug": "^4.3.4", + "jose": "^4.14.6", + "limiter": "^1.1.5", + "lru-memoizer": "^2.2.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dev": true, + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/klaw": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz", + "integrity": "sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.9" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/limiter": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", + "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==", + "dev": true + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dev": true, + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/locate-path": { 
+ "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "dev": true + }, + "node_modules/lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==", + "dev": true + }, + "node_modules/lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", + "dev": true + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "dev": true + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "dev": true + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", + "dev": true + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "dev": true + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "dev": true + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "dev": true + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": 
"https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "dev": true + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/long": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==" + }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, + "node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "node_modules/lru-memoizer": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.3.0.tgz", + "integrity": "sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug==", + "dev": true, + "dependencies": { + "lodash.clonedeep": "^4.5.0", + "lru-cache": "6.0.0" + } + }, + "node_modules/lru-memoizer/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/lru-memoizer/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": 
"https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it-anchor": { + "version": "8.6.7", + "resolved": "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz", + "integrity": "sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==", + "dev": true, + "peerDependencies": { + "@types/markdown-it": "*", + "markdown-it": "*" + } + }, + "node_modules/markdown-it/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/marked": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", + "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", + "dev": true, + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "dev": true + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + 
"engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "dev": true, + "optional": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", + "dev": true + }, + "node_modules/minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==", + "dev": true + }, + "node_modules/minimatch": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.8.tgz", + "integrity": "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mocha": { + "version": "10.8.2", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.8.2.tgz", + "integrity": "sha512-VZlYo/WE8t1tstuRmqgeyBgCbJc/lEdopaa+axcKzTBJ+UIdlAB9XnmvTCAH4pwR4ElNInaedhEBmZD8iCSVEg==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": "^4.0.0", + "find-up": "^5.0.0", + "glob": "^8.1.0", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9", + "yargs-unparser": "^2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + 
"node_modules/mocha/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/mocha/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/mocha/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/mocha/node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/mocha/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/mocha/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/mocha/node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mock-require": { + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/mock-require/-/mock-require-3.0.3.tgz", + "integrity": "sha512-lLzfLHcyc10MKQnNUCv7dMcoY/2Qxd6wJfbqCcVk3LDb8An4hF6ohk5AztrvgKhJCqj36uyzi/p5se+tvyD+Wg==", + "dev": true, + "dependencies": { + "get-caller-file": "^1.0.2", + "normalize-path": "^2.1.1" + }, + "engines": { + "node": ">=4.3.0" + } + }, + "node_modules/mock-require/node_modules/normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", + "dev": true, + "dependencies": { + "remove-trailing-separator": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/module-details-from-path": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", + "integrity": "sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==", + "dev": true + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node_modules/nise": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-4.1.0.tgz", + "integrity": "sha512-eQMEmGN/8arp0xsvGoQ+B1qvSkR73B1nWSCh7nOt5neMCtwcQVYQGdzQMhcNscktTsWB54xnlSQFzOAPJD8nXA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^6.0.0", + "@sinonjs/text-encoding": "^0.7.1", + "just-extend": "^4.0.2", + "path-to-regexp": "^1.7.0" + } + }, + "node_modules/nise/node_modules/path-to-regexp": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", + "dev": true, + "dependencies": { + "isarray": "0.0.1" + } + }, + "node_modules/nock": { + "version": "13.5.6", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", + "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "propagate": 
"^2.0.0" + }, + "engines": { + "node": ">= 10.13" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-fetch/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true + }, + "node_modules/node-fetch/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true + }, + "node_modules/node-fetch/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "dev": true, + "engines": { + "node": ">= 6.13.0" + } + }, + "node_modules/node-version": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/node-version/-/node-version-1.2.0.tgz", + "integrity": "sha512-ma6oU4Sk0qOoKEAymVoTvk8EdXEobdS7m/mAGhDJ8Rouugho48crHBORAmy5BoOcv8wraPM6xumapQp5hl4iIQ==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/nwsapi": { + "version": "2.2.16", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.16.tgz", + "integrity": "sha512-F1I/bimDpj3ncaNDhfyMWuFqmQDBwDB0Fogc2qpL3BWvkQteFD/8BzWuIRl83rq0DXfm8SGt/HFhLXZyljTXcQ==", + "dev": true + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-deep-merge": { + "version": "2.0.0", + 
"resolved": "https://registry.npmjs.org/object-deep-merge/-/object-deep-merge-2.0.0.tgz", + "integrity": "sha512-3DC3UMpeffLTHiuXSy/UG4NOIYTLlY9u3V82+djSCLYClWobZiS4ivYzpIUWrRY/nfsJ8cWsKyG3QfyLePmhvg==", + "dev": true, + "license": "MIT" + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "optional": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, 
+ "node_modules/parse-imports-exports": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/parse-imports-exports/-/parse-imports-exports-0.2.4.tgz", + "integrity": "sha512-4s6vd6dx1AotCx/RCI2m7t7GCh5bDRUtGNvRfHSP2wbBQdMi67pPe7mtzmgwcaQ8VKK/6IB7Glfyu3qdZJPybQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-statements": "1.0.11" + } + }, + "node_modules/parse-statements": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/parse-statements/-/parse-statements-1.0.11.tgz", + "integrity": "sha512-HlsyYdMBnbPQ9Jr/VgJ1YF4scnldvJpJxCVx6KgqPL4dxppsWrJHCIIxQXMJrqGnsRkNPATbeMJ8Yxu7JMsYcA==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/partial-json": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/partial-json/-/partial-json-0.1.7.tgz", + "integrity": "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA==", + "dev": true + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + 
"node_modules/portfinder": { + "version": "1.0.32", + "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.32.tgz", + "integrity": "sha512-on2ZJVVDXRADWE6jnQaX0ioEylzgBpQk8r55NE4wjXW1ZxO+BgDlY6DXwj20i0V8eB4SenDQ00WEaxfiIQPcxg==", + "dev": true, + "dependencies": { + "async": "^2.6.4", + "debug": "^3.2.7", + "mkdirp": "^0.5.6" + }, + "engines": { + "node": ">= 0.12.0" + } + }, + "node_modules/portfinder/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/promise-polyfill": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/promise-polyfill/-/promise-polyfill-6.1.0.tgz", + "integrity": "sha512-g0LWaH0gFsxovsU7R5LrrhHhWAWiHRnh1GPrhXnPgYsDkIqjRYUYSZEsej/wtleDrz5xVSIDbeKfidztp2XHFQ==", + "dev": true + }, + "node_modules/propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/proto3-json-serializer": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz", + "integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==", + "dev": true, + "optional": true, + "dependencies": { + "protobufjs": "^7.2.5" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + 
"engines": { + "node": ">=12.0.0" + } + }, + "node_modules/protobufjs-cli": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/protobufjs-cli/-/protobufjs-cli-1.1.3.tgz", + "integrity": "sha512-MqD10lqF+FMsOayFiNOdOGNlXc4iKDCf0ZQPkPR+gizYh9gqUeGTWulABUCdI+N67w5RfJ6xhgX4J8pa8qmMXQ==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "escodegen": "^1.13.0", + "espree": "^9.0.0", + "estraverse": "^5.1.0", + "glob": "^8.0.0", + "jsdoc": "^4.0.0", + "minimist": "^1.2.0", + "semver": "^7.1.2", + "tmp": "^0.2.1", + "uglify-js": "^3.7.7" + }, + "bin": { + "pbjs": "bin/pbjs", + "pbts": "bin/pbts" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "protobufjs": "^7.0.0" + } + }, + "node_modules/protobufjs-cli/node_modules/escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dev": true, + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=4.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/protobufjs-cli/node_modules/escodegen/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/protobufjs-cli/node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dev": true, + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/protobufjs-cli/node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/protobufjs-cli/node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/protobufjs-cli/node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dev": true, + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + 
"forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", + "dev": true + }, + "node_modules/psl": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz", + "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==", + "dev": true, + "dependencies": { + "punycode": "^2.3.1" + }, + "funding": { + "url": "https://github.com/sponsors/lupomontero" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/quansync": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/quansync/-/quansync-0.2.11.tgz", + "integrity": "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/antfu" + }, + { + "type": "individual", + "url": "https://github.com/sponsors/sxzz" + } + ], + "license": "MIT" + }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": 
"sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "optional": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", + "dev": true + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-in-the-middle": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-7.4.0.tgz", + "integrity": "sha512-X34iHADNbNDfr6OTStIAHWSAvvKQRYgLO6duASaVf7J2VA3lvmNYboAHOuLC2huav1IwgZJtyEcJCKVzFxOSMQ==", + "dev": true, + "dependencies": { + "debug": "^4.3.5", + "module-details-from-path": "^1.0.3", + "resolve": "^1.22.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, + "node_modules/requizzle": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.4.tgz", + "integrity": 
"sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw==", + "dev": true, + "dependencies": { + "lodash": "^4.17.21" + } + }, + "node_modules/reserved-identifiers": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/reserved-identifiers/-/reserved-identifiers-1.2.0.tgz", + "integrity": "sha512-yE7KUfFvaBFzGPs5H3Ops1RevfUEsDc5Iz65rOwWg4lE8HJSYtle77uul3+573457oHvBKuHYDl/xqUkKpEEdw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "dev": true, + "optional": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "dev": true, + "optional": true, + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rolldown": { + "version": "1.0.0-beta.45", + "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-beta.45.tgz", + "integrity": "sha512-iMmuD72XXLf26Tqrv1cryNYLX6NNPLhZ3AmNkSf8+xda0H+yijjGJ+wVT9UdBUHOpKzq9RjKtQKRCWoEKQQBZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "=0.95.0", + "@rolldown/pluginutils": "1.0.0-beta.45" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-beta.45", + "@rolldown/binding-darwin-arm64": 
"1.0.0-beta.45", + "@rolldown/binding-darwin-x64": "1.0.0-beta.45", + "@rolldown/binding-freebsd-x64": "1.0.0-beta.45", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-beta.45", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-beta.45", + "@rolldown/binding-linux-arm64-musl": "1.0.0-beta.45", + "@rolldown/binding-linux-x64-gnu": "1.0.0-beta.45", + "@rolldown/binding-linux-x64-musl": "1.0.0-beta.45", + "@rolldown/binding-openharmony-arm64": "1.0.0-beta.45", + "@rolldown/binding-wasm32-wasi": "1.0.0-beta.45", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-beta.45", + "@rolldown/binding-win32-ia32-msvc": "1.0.0-beta.45", + "@rolldown/binding-win32-x64-msvc": "1.0.0-beta.45" + } + }, + "node_modules/rolldown-plugin-dts": { + "version": "0.17.2", + "resolved": "https://registry.npmjs.org/rolldown-plugin-dts/-/rolldown-plugin-dts-0.17.2.tgz", + "integrity": "sha512-tbLm7FoDvZAhAY33wJbq0ACw+srToKZ5xFqwn/K4tayGloZPXQHyOEPEYi7whEfTCaMndZWaho9+oiQTlwIe6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/generator": "^7.28.5", + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "ast-kit": "^2.1.3", + "birpc": "^2.6.1", + "debug": "^4.4.3", + "dts-resolver": "^2.1.2", + "get-tsconfig": "^4.13.0", + "magic-string": "^0.30.21" + }, + "engines": { + "node": ">=20.18.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "@ts-macro/tsc": "^0.3.6", + "@typescript/native-preview": ">=7.0.0-dev.20250601.1", + "rolldown": "^1.0.0-beta.44", + "typescript": "^5.0.0", + "vue-tsc": "~3.1.0" + }, + "peerDependenciesMeta": { + "@ts-macro/tsc": { + "optional": true + }, + "@typescript/native-preview": { + "optional": true + }, + "typescript": { + "optional": true + }, + "vue-tsc": { + "optional": true + } + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/saxes": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", + "dev": true, + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/shimmer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==", + "dev": true + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sinon": { + "version": "9.2.4", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.2.4.tgz", + "integrity": "sha512-zljcULZQsJxVra28qIAL6ow1Z9tpattkCTEJR4RBP3TGc00FcttsP5pK284Nas5WjMZU5Yzy3kAIp3B3KRf5Yg==", + "deprecated": "16.1.1", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.8.1", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/samsam": "^5.3.1", + "diff": "^4.0.2", + "nise": "^4.0.4", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "node_modules/sinon/node_modules/diff": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/sinon/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true, + "license": "CC-BY-3.0" + }, + "node_modules/spdx-expression-parse": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-4.0.0.tgz", + "integrity": "sha512-Clya5JIij/7C6bRR22+tnGXbc4VKlibKSVj2iHvVeX5iMW7s1SIQlqu699JkODJJIhh/pUu8L0/VLh8xflD+LQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.22", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", + "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dev": true, + "optional": true, + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", + "dev": true, + "optional": true + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "optional": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-argv": { + "version": "0.3.2", + "resolved": 
"https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz", + "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==", + "dev": true, + "engines": { + "node": ">=0.6.19" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==", + "dev": true, + "optional": true + }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==", + "dev": true, + "optional": true + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/symbol-tree": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true + }, + "node_modules/synckit": { + "version": "0.11.11", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.11.tgz", + "integrity": "sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/synckit" + } + }, + "node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dev": true, + "optional": true, + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/teeny-request/node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "optional": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "optional": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/teeny-request/node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "optional": true, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "optional": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/timsort": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", + "integrity": "sha512-qsdtZH+vMoCARQtyod4imc2nIJwg9Cc7lPRrw9CzF8ZKR0khdr8+2nX80PBhET3tcyTtJDxAffGh2rXH4tyU8A==", + "dev": true + }, 
+ "node_modules/tinyexec": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz", + "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/tmp": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/to-valid-identifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-valid-identifier/-/to-valid-identifier-1.0.0.tgz", + "integrity": "sha512-41wJyvKep3yT2tyPqX/4blcfybknGB4D+oETKLs7Q76UiPqRpUJK3hr1nxelyYO0PHKVzJwlu0aCeEAsGI6rpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/base62": "^1.0.0", + "reserved-identifiers": "^1.0.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tough-cookie": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz", + "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==", + "dev": true, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + }, + "engines": { + "node": 
">=6" + } + }, + "node_modules/tough-cookie/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/tr46": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", + "dev": true, + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tree-kill": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", + "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", + "dev": true, + "license": "MIT", + "bin": { + "tree-kill": "cli.js" + } + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/tsdown": { + "version": "0.15.11", + "resolved": "https://registry.npmjs.org/tsdown/-/tsdown-0.15.11.tgz", + "integrity": "sha512-7k2OglWWt6LzvJKwEf1izbGvETvVfPYRBr9JgEYVRnz/R9LeJSp+B51FUMO46wUeEGtZ1jA3E3PtWWLlq3iygA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansis": "^4.2.0", + "cac": "^6.7.14", + 
"chokidar": "^4.0.3", + "debug": "^4.4.3", + "diff": "^8.0.2", + "empathic": "^2.0.0", + "hookable": "^5.5.3", + "rolldown": "1.0.0-beta.45", + "rolldown-plugin-dts": "^0.17.1", + "semver": "^7.7.3", + "tinyexec": "^1.0.1", + "tinyglobby": "^0.2.15", + "tree-kill": "^1.2.2", + "unconfig": "^7.3.3", + "unrun": "^0.2.0" + }, + "bin": { + "tsdown": "dist/run.mjs" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "@arethetypeswrong/core": "^0.18.1", + "publint": "^0.3.0", + "typescript": "^5.0.0", + "unplugin-lightningcss": "^0.4.0", + "unplugin-unused": "^0.5.0" + }, + "peerDependenciesMeta": { + "@arethetypeswrong/core": { + "optional": true + }, + "publint": { + "optional": true + }, + "typescript": { + "optional": true + }, + "unplugin-lightningcss": { + "optional": true + }, + "unplugin-unused": { + "optional": true + } + } + }, + "node_modules/tsdown/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/tsdown/node_modules/diff": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.2.tgz", + "integrity": "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/tsdown/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", + "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + 
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "dev": true + }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/unconfig": { + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/unconfig/-/unconfig-7.3.3.tgz", + "integrity": "sha512-QCkQoOnJF8L107gxfHL0uavn7WD9b3dpBcFX6HtfQYmjw2YzWxGuFQ0N0J6tE9oguCBJn9KOvfqYDCMPHIZrBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@quansync/fs": "^0.1.5", + "defu": "^6.1.4", + "jiti": "^2.5.1", + "quansync": "^0.2.11" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/underscore": { + "version": "1.13.7", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz", + "integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==", + "dev": true + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/unrun": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/unrun/-/unrun-0.2.1.tgz", + "integrity": "sha512-1HpwmlCKrAOP3jPxFisPR0sYpPuiNtyYKJbmKu9iugIdvCte3DH1uJ1p1DBxUWkxW2pjvkUguJoK9aduK8ak3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/runtime": "^0.95.0", + "rolldown": "1.0.0-beta.45", + "synckit": "^0.11.11" + }, + "bin": { + "unrun": "dist/cli.js" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/Gugustinette" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": 
"sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "optional": true + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.0.5.tgz", + "integrity": "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, + "node_modules/validator": { + "version": "13.12.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.12.0.tgz", + "integrity": "sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/w3c-hr-time": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", + "deprecated": "Use your platform's native performance.now() and performance.timeOrigin.", + "dev": true, + "dependencies": { + "browser-process-hrtime": "^1.0.0" + } + }, + "node_modules/w3c-xmlserializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz", + "integrity": "sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==", + "dev": true, + "dependencies": { + "xml-name-validator": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/webidl-conversions": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", + "dev": true, + "engines": { + "node": ">=10.4" + } + }, + 
"node_modules/websocket-driver": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", + "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", + "dev": true, + "dependencies": { + "http-parser-js": ">=0.5.1", + "safe-buffer": ">=5.1.0", + "websocket-extensions": ">=0.1.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/websocket-extensions": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", + "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/whatwg-encoding": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, + "dependencies": { + "iconv-lite": "0.4.24" + } + }, + "node_modules/whatwg-mimetype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true + }, + "node_modules/whatwg-url": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", + "dev": true, + "dependencies": { + "lodash": "^4.7.0", + "tr46": "^2.1.0", + "webidl-conversions": "^6.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/which-module": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", + "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", + "dev": true + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true + }, + "node_modules/workerpool": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + 
"strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/ws": { + "version": "7.5.10", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", + "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", + "dev": true, + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/xml-name-validator": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz", + "integrity": "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==", + "dev": true + }, + "node_modules/xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true + }, + "node_modules/xmlcreate": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz", + "integrity": "sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg==", + "dev": true + }, + "node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": 
"sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser/node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/yargs/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": 
true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/z-schema": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-5.0.5.tgz", + "integrity": "sha512-D7eujBWkLa3p2sIpJA0d1pr7es+a7m0vFAnZLlCEKq/Ij2k0MLi9Br2UPxoxdYystm5K1yeBGzub0FlYUEWj2Q==", + "dev": true, + "dependencies": { + "lodash.get": "^4.4.2", + "lodash.isequal": "^4.5.0", + "validator": "^13.7.0" + }, + "bin": { + "z-schema": "bin/z-schema" + }, + "engines": { + "node": ">=8.0.0" + }, + "optionalDependencies": { + "commander": "^9.4.1" + } + }, + "node_modules/zod": { + "version": "3.24.1", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.1.tgz", + "integrity": "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-to-json-schema": { + "version": "3.24.1", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz", + "integrity": "sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==", + "dev": true, + "peerDependencies": { + "zod": "^3.24.1" + } + } + } +} diff --git a/package.json b/package.json index 85f2828c0..d1a002815 100644 --- a/package.json +++ b/package.json @@ -1,68 +1,562 @@ { "name": "firebase-functions", - "version": "1.1.0", + "version": "7.0.0-rc.2", "description": "Firebase SDK for Cloud Functions", - "main": "lib/index.js", - "scripts": { - "build": "node_modules/.bin/tsc -p tsconfig.release.json", - "build:pack": "rm -rf lib && npm install && node_modules/.bin/tsc -p tsconfig.release.json && npm pack", - "build:release": "npm install --production && npm install typescript firebase-admin && node_modules/.bin/tsc -p tsconfig.release.json", - "lint": "node_modules/.bin/tslint src/{**/*,*}.ts spec/{**/*,*}.ts integration_test/functions/src/{**/*,*}.ts", - "pretest": "node_modules/.bin/tsc && cp -r spec/fixtures .tmp/spec", - "test": "mocha .tmp/spec/index.spec.js", - "posttest": "npm run lint && rm -rf .tmp", - "postinstall": "node ./upgrade-warning" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/firebase/firebase-functions.git" - }, "keywords": [ "firebase", "functions", "google", "cloud" ], - "author": "Firebase Team", - "license": "MIT", + "homepage": "https://github.com/firebase/firebase-functions#readme", "bugs": { "url": "https://github.com/firebase/firebase-functions/issues" }, - "homepage": "https://github.com/firebase/firebase-functions#readme", - "devDependencies": { - "@types/chai": "^3.4.32", - "@types/chai-as-promised": "0.0.28", - "@types/mocha": "^2.2.31", - "@types/mock-require": "^1.3.3", - "@types/nock": "^0.54.32", - "@types/node": "^6.0.38", - "@types/sinon": "^1.16.29", - "chai": "^3.5.0", - "chai-as-promised": "^5.2.0", - "firebase-admin": "~5.12.1", - "istanbul": "^0.4.2", - "mocha": "^2.4.5", - "mock-require": "^2.0.1", - "nock": "^9.0.0", - "sinon": "^1.17.4", - "tslint": "^3.15.1", - "typescript": "~2.8.3" + "repository": { + "type": 
"git", + "url": "https://github.com/firebase/firebase-functions.git" }, - "peerDependencies": { - "firebase-admin": "~5.12.1" + "license": "MIT", + "author": "Firebase Team", + "files": [ + ".guides", + "lib", + "protos" + ], + "main": "lib/v2/index.js", + "bin": { + "firebase-functions": "./lib/bin/firebase-functions.js" + }, + "types": "lib/v2/index.d.ts", + "sideEffects": [ + "./lib/logger/compat.js", + "./lib/esm/logger/compat.mjs" + ], + "exports": { + "./logger/compat": { + "types": "./lib/logger/compat.d.ts", + "import": "./lib/esm/logger/compat.mjs", + "require": "./lib/logger/compat.js" + }, + "./logger": { + "types": "./lib/logger/index.d.ts", + "import": "./lib/esm/logger/index.mjs", + "require": "./lib/logger/index.js" + }, + "./params": { + "types": "./lib/params/index.d.ts", + "import": "./lib/esm/params/index.mjs", + "require": "./lib/params/index.js" + }, + "./v1": { + "types": "./lib/v1/index.d.ts", + "import": "./lib/esm/v1/index.mjs", + "require": "./lib/v1/index.js" + }, + "./v1/analytics": { + "types": "./lib/v1/providers/analytics.d.ts", + "import": "./lib/esm/v1/providers/analytics.mjs", + "require": "./lib/v1/providers/analytics.js" + }, + "./v1/auth": { + "types": "./lib/v1/providers/auth.d.ts", + "import": "./lib/esm/v1/providers/auth.mjs", + "require": "./lib/v1/providers/auth.js" + }, + "./v1/database": { + "types": "./lib/v1/providers/database.d.ts", + "import": "./lib/esm/v1/providers/database.mjs", + "require": "./lib/v1/providers/database.js" + }, + "./v1/firestore": { + "types": "./lib/v1/providers/firestore.d.ts", + "import": "./lib/esm/v1/providers/firestore.mjs", + "require": "./lib/v1/providers/firestore.js" + }, + "./v1/https": { + "types": "./lib/v1/providers/https.d.ts", + "import": "./lib/esm/v1/providers/https.mjs", + "require": "./lib/v1/providers/https.js" + }, + "./v1/pubsub": { + "types": "./lib/v1/providers/pubsub.d.ts", + "import": "./lib/esm/v1/providers/pubsub.mjs", + "require": "./lib/v1/providers/pubsub.js" + }, + "./v1/remoteConfig": { + "types": "./lib/v1/providers/remoteConfig.d.ts", + "import": "./lib/esm/v1/providers/remoteConfig.mjs", + "require": "./lib/v1/providers/remoteConfig.js" + }, + "./v1/storage": { + "types": "./lib/v1/providers/storage.d.ts", + "import": "./lib/esm/v1/providers/storage.mjs", + "require": "./lib/v1/providers/storage.js" + }, + "./v1/tasks": { + "types": "./lib/v1/providers/tasks.d.ts", + "import": "./lib/esm/v1/providers/tasks.mjs", + "require": "./lib/v1/providers/tasks.js" + }, + "./v1/testLab": { + "types": "./lib/v1/providers/testLab.d.ts", + "import": "./lib/esm/v1/providers/testLab.mjs", + "require": "./lib/v1/providers/testLab.js" + }, + ".": { + "types": "./lib/v2/index.d.ts", + "import": "./lib/esm/v2/index.mjs", + "require": "./lib/v2/index.js" + }, + "./core": { + "types": "./lib/v2/core.d.ts", + "import": "./lib/esm/v2/core.mjs", + "require": "./lib/v2/core.js" + }, + "./options": { + "types": "./lib/v2/options.d.ts", + "import": "./lib/esm/v2/options.mjs", + "require": "./lib/v2/options.js" + }, + "./https": { + "types": "./lib/v2/providers/https.d.ts", + "import": "./lib/esm/v2/providers/https.mjs", + "require": "./lib/v2/providers/https.js" + }, + "./pubsub": { + "types": "./lib/v2/providers/pubsub.d.ts", + "import": "./lib/esm/v2/providers/pubsub.mjs", + "require": "./lib/v2/providers/pubsub.js" + }, + "./storage": { + "types": "./lib/v2/providers/storage.d.ts", + "import": "./lib/esm/v2/providers/storage.mjs", + "require": "./lib/v2/providers/storage.js" + }, + "./tasks": { + 
"types": "./lib/v2/providers/tasks.d.ts", + "import": "./lib/esm/v2/providers/tasks.mjs", + "require": "./lib/v2/providers/tasks.js" + }, + "./alerts": { + "types": "./lib/v2/providers/alerts/index.d.ts", + "import": "./lib/esm/v2/providers/alerts/index.mjs", + "require": "./lib/v2/providers/alerts/index.js" + }, + "./alerts/appDistribution": { + "types": "./lib/v2/providers/alerts/appDistribution.d.ts", + "import": "./lib/esm/v2/providers/alerts/appDistribution.mjs", + "require": "./lib/v2/providers/alerts/appDistribution.js" + }, + "./alerts/billing": { + "types": "./lib/v2/providers/alerts/billing.d.ts", + "import": "./lib/esm/v2/providers/alerts/billing.mjs", + "require": "./lib/v2/providers/alerts/billing.js" + }, + "./alerts/crashlytics": { + "types": "./lib/v2/providers/alerts/crashlytics.d.ts", + "import": "./lib/esm/v2/providers/alerts/crashlytics.mjs", + "require": "./lib/v2/providers/alerts/crashlytics.js" + }, + "./alerts/performance": { + "types": "./lib/v2/providers/alerts/performance.d.ts", + "import": "./lib/esm/v2/providers/alerts/performance.mjs", + "require": "./lib/v2/providers/alerts/performance.js" + }, + "./eventarc": { + "types": "./lib/v2/providers/eventarc.d.ts", + "import": "./lib/esm/v2/providers/eventarc.mjs", + "require": "./lib/v2/providers/eventarc.js" + }, + "./identity": { + "types": "./lib/v2/providers/identity.d.ts", + "import": "./lib/esm/v2/providers/identity.mjs", + "require": "./lib/v2/providers/identity.js" + }, + "./database": { + "types": "./lib/v2/providers/database.d.ts", + "import": "./lib/esm/v2/providers/database.mjs", + "require": "./lib/v2/providers/database.js" + }, + "./scheduler": { + "types": "./lib/v2/providers/scheduler.d.ts", + "import": "./lib/esm/v2/providers/scheduler.mjs", + "require": "./lib/v2/providers/scheduler.js" + }, + "./remoteConfig": { + "types": "./lib/v2/providers/remoteConfig.d.ts", + "import": "./lib/esm/v2/providers/remoteConfig.mjs", + "require": "./lib/v2/providers/remoteConfig.js" + }, + "./testLab": { + "types": "./lib/v2/providers/testLab.d.ts", + "import": "./lib/esm/v2/providers/testLab.mjs", + "require": "./lib/v2/providers/testLab.js" + }, + "./firestore": { + "types": "./lib/v2/providers/firestore.d.ts", + "import": "./lib/esm/v2/providers/firestore.mjs", + "require": "./lib/v2/providers/firestore.js" + }, + "./dataconnect": { + "types": "./lib/v2/providers/dataconnect.d.ts", + "import": "./lib/esm/v2/providers/dataconnect.mjs", + "require": "./lib/v2/providers/dataconnect.js" + }, + "./v2": { + "types": "./lib/v2/index.d.ts", + "import": "./lib/esm/v2/index.mjs", + "require": "./lib/v2/index.js" + }, + "./v2/core": { + "types": "./lib/v2/core.d.ts", + "import": "./lib/esm/v2/core.mjs", + "require": "./lib/v2/core.js" + }, + "./v2/options": { + "types": "./lib/v2/options.d.ts", + "import": "./lib/esm/v2/options.mjs", + "require": "./lib/v2/options.js" + }, + "./v2/https": { + "types": "./lib/v2/providers/https.d.ts", + "import": "./lib/esm/v2/providers/https.mjs", + "require": "./lib/v2/providers/https.js" + }, + "./v2/pubsub": { + "types": "./lib/v2/providers/pubsub.d.ts", + "import": "./lib/esm/v2/providers/pubsub.mjs", + "require": "./lib/v2/providers/pubsub.js" + }, + "./v2/storage": { + "types": "./lib/v2/providers/storage.d.ts", + "import": "./lib/esm/v2/providers/storage.mjs", + "require": "./lib/v2/providers/storage.js" + }, + "./v2/tasks": { + "types": "./lib/v2/providers/tasks.d.ts", + "import": "./lib/esm/v2/providers/tasks.mjs", + "require": "./lib/v2/providers/tasks.js" + }, + "./v2/alerts": 
{ + "types": "./lib/v2/providers/alerts/index.d.ts", + "import": "./lib/esm/v2/providers/alerts/index.mjs", + "require": "./lib/v2/providers/alerts/index.js" + }, + "./v2/alerts/appDistribution": { + "types": "./lib/v2/providers/alerts/appDistribution.d.ts", + "import": "./lib/esm/v2/providers/alerts/appDistribution.mjs", + "require": "./lib/v2/providers/alerts/appDistribution.js" + }, + "./v2/alerts/billing": { + "types": "./lib/v2/providers/alerts/billing.d.ts", + "import": "./lib/esm/v2/providers/alerts/billing.mjs", + "require": "./lib/v2/providers/alerts/billing.js" + }, + "./v2/alerts/crashlytics": { + "types": "./lib/v2/providers/alerts/crashlytics.d.ts", + "import": "./lib/esm/v2/providers/alerts/crashlytics.mjs", + "require": "./lib/v2/providers/alerts/crashlytics.js" + }, + "./v2/alerts/performance": { + "types": "./lib/v2/providers/alerts/performance.d.ts", + "import": "./lib/esm/v2/providers/alerts/performance.mjs", + "require": "./lib/v2/providers/alerts/performance.js" + }, + "./v2/eventarc": { + "types": "./lib/v2/providers/eventarc.d.ts", + "import": "./lib/esm/v2/providers/eventarc.mjs", + "require": "./lib/v2/providers/eventarc.js" + }, + "./v2/identity": { + "types": "./lib/v2/providers/identity.d.ts", + "import": "./lib/esm/v2/providers/identity.mjs", + "require": "./lib/v2/providers/identity.js" + }, + "./v2/database": { + "types": "./lib/v2/providers/database.d.ts", + "import": "./lib/esm/v2/providers/database.mjs", + "require": "./lib/v2/providers/database.js" + }, + "./v2/scheduler": { + "types": "./lib/v2/providers/scheduler.d.ts", + "import": "./lib/esm/v2/providers/scheduler.mjs", + "require": "./lib/v2/providers/scheduler.js" + }, + "./v2/remoteConfig": { + "types": "./lib/v2/providers/remoteConfig.d.ts", + "import": "./lib/esm/v2/providers/remoteConfig.mjs", + "require": "./lib/v2/providers/remoteConfig.js" + }, + "./v2/testLab": { + "types": "./lib/v2/providers/testLab.d.ts", + "import": "./lib/esm/v2/providers/testLab.mjs", + "require": "./lib/v2/providers/testLab.js" + }, + "./v2/firestore": { + "types": "./lib/v2/providers/firestore.d.ts", + "import": "./lib/esm/v2/providers/firestore.mjs", + "require": "./lib/v2/providers/firestore.js" + }, + "./v2/dataconnect": { + "types": "./lib/v2/providers/dataconnect.d.ts", + "import": "./lib/esm/v2/providers/dataconnect.mjs", + "require": "./lib/v2/providers/dataconnect.js" + } + }, + "typesVersions": { + "*": { + "logger": [ + "lib/logger/index" + ], + "logger/compat": [ + "lib/logger/compat" + ], + "params": [ + "lib/params/index" + ], + "v1": [ + "lib/v1/index" + ], + "v1/analytics": [ + "lib/v1/providers/analytics" + ], + "v1/auth": [ + "lib/v1/providers/auth" + ], + "v1/database": [ + "lib/v1/providers/database" + ], + "v1/firestore": [ + "lib/v1/providers/firestore" + ], + "v1/https": [ + "lib/v1/providers/https" + ], + "v1/pubsub": [ + "lib/v1/providers/pubsub" + ], + "v1/remoteConfig": [ + "lib/v1/providers/remoteConfig" + ], + "v1/storage": [ + "lib/v1/providers/storage" + ], + "v1/tasks": [ + "lib/v1/providers/tasks" + ], + "v1/testLab": [ + "lib/v1/providers/testLab" + ], + "core": [ + "lib/v2/core" + ], + "options": [ + "lib/v2/options" + ], + "https": [ + "lib/v2/providers/https" + ], + "pubsub": [ + "lib/v2/providers/pubsub" + ], + "storage": [ + "lib/v2/providers/storage" + ], + "tasks": [ + "lib/v2/providers/tasks" + ], + "alerts": [ + "lib/v2/providers/alerts/index" + ], + "alerts/appDistribution": [ + "lib/v2/providers/alerts/appDistribution" + ], + "alerts/billing": [ + 
"lib/v2/providers/alerts/billing" + ], + "alerts/crashlytics": [ + "lib/v2/providers/alerts/crashlytics" + ], + "alerts/performance": [ + "lib/v2/providers/alerts/performance" + ], + "eventarc": [ + "lib/v2/providers/eventarc" + ], + "identity": [ + "lib/v2/providers/identity" + ], + "database": [ + "lib/v2/providers/database" + ], + "scheduler": [ + "lib/v2/providers/scheduler" + ], + "remoteConfig": [ + "lib/v2/providers/remoteConfig" + ], + "testLab": [ + "lib/v2/providers/testLab" + ], + "firestore": [ + "lib/v2/providers/firestore" + ], + "dataconnect": [ + "lib/v2/providers/dataconnect" + ], + "v2": [ + "lib/v2/index" + ], + "v2/core": [ + "lib/v2/core" + ], + "v2/alerts": [ + "lib/v2/providers/alerts/index" + ], + "v2/alerts/appDistribution": [ + "lib/v2/providers/alerts/appDistribution" + ], + "v2/alerts/billing": [ + "lib/v2/providers/alerts/billing" + ], + "v2/alerts/crashlytics": [ + "lib/v2/providers/alerts/crashlytics" + ], + "v2/alerts/performance": [ + "lib/v2/providers/alerts/performance" + ], + "v2/base": [ + "lib/v2/base" + ], + "v2/database": [ + "lib/v2/providers/database" + ], + "v2/eventarc": [ + "lib/v2/providers/eventarc" + ], + "v2/identity": [ + "lib/v2/providers/identity" + ], + "v2/options": [ + "lib/v2/options" + ], + "v2/https": [ + "lib/v2/providers/https" + ], + "v2/pubsub": [ + "lib/v2/providers/pubsub" + ], + "v2/storage": [ + "lib/v2/providers/storage" + ], + "v2/tasks": [ + "lib/v2/providers/tasks" + ], + "v2/scheduler": [ + "lib/v2/providers/scheduler" + ], + "v2/remoteConfig": [ + "lib/v2/providers/remoteConfig" + ], + "v2/testLab": [ + "lib/v2/providers/testLab" + ], + "v2/firestore": [ + "lib/v2/providers/firestore" + ], + "v2/dataconnect": [ + "lib/v2/providers/dataconnect" + ], + "*": [ + "lib/v2/index.d.ts" + ] + } + }, + "publishConfig": { + "registry": "https://wombat-dressing-room.appspot.com" + }, + "scripts": { + "docgen:v1:extract": "api-extractor run -c docgen/api-extractor.v1.json --local", + "docgen:v1:toc": "ts-node docgen/toc.ts --input docgen/v1 --output docgen/v1/markdown/toc --path /docs/reference/functions", + "docgen:v1:gen": "api-documenter-fire markdown -i docgen/v1 -o docgen/v1/markdown --project functions && npm run docgen:v1:toc", + "docgen:v1": "npm run build && npm run docgen:v1:extract && npm run docgen:v1:gen", + "docgen:v2:extract": "api-extractor run -c docgen/api-extractor.v2.json --local", + "docgen:v2:toc": "ts-node docgen/toc.ts --input docgen/v2 --output docgen/v2/markdown/toc --path /docs/reference/functions/2nd-gen/node", + "docgen:v2:gen": "api-documenter-fire markdown -i docgen/v2 -o docgen/v2/markdown --project functions && npm run docgen:v2:toc", + "docgen:v2": "npm run build && npm run docgen:v2:extract && npm run docgen:v2:gen", + "build": "tsdown && tsc -p tsconfig.release.json", + "build:pack": "rm -rf lib && npm install && npm run build && npm pack", + "build:watch": "npm run build -- -w", + "format": "npm run format:ts && npm run format:other", + "format:other": "npm run lint:other -- --write", + "format:ts": "npm run lint:ts -- --fix --quiet", + "lint": "npm run lint:ts && npm run lint:other", + "lint:other": "prettier --check '**/*.{md,yaml,yml}'", + "lint:quiet": "npm run lint:ts -- --quiet && npm run lint:other", + "lint:ts": "eslint .", + "test": "mocha --file ./mocha/setup.ts \"spec/**/*.spec.ts\"", + "test:bin": "./scripts/bin-test/run.sh", + "test:packaging": "./scripts/test-packaging.sh", + "test:postmerge": "./integration_test/run_tests.sh" }, "dependencies": { - "@types/cors": "^2.8.1", - 
"@types/express": "^4.11.1", - "@types/jsonwebtoken": "^7.2.6", - "@types/lodash": "^4.14.34", - "cors": "^2.8.4", - "express": "^4.16.2", - "jsonwebtoken": "^8.2.1", - "lodash": "^4.6.1" + "@types/cors": "^2.8.5", + "@types/express": "^4.17.21", + "cors": "^2.8.5", + "express": "^4.21.0", + "protobufjs": "^7.2.2" }, - "engines": { - "node": ">=6.0.0" + "devDependencies": { + "@eslint/eslintrc": "^3.3.1", + "@firebase/api-documenter": "^0.2.0", + "@microsoft/api-documenter": "^7.13.45", + "@microsoft/api-extractor": "^7.18.7", + "@types/chai": "^4.1.7", + "@types/chai-as-promised": "^7.1.0", + "@types/jsonwebtoken": "^9.0.0", + "@types/mocha": "^5.2.7", + "@types/mock-require": "^2.0.0", + "@types/nock": "^10.0.3", + "@types/node": "^18.0.0", + "@types/node-fetch": "^3.0.3", + "@types/sinon": "^9.0.11", + "@typescript-eslint/eslint-plugin": "^8.46.2", + "@typescript-eslint/parser": "^8.46.2", + "api-extractor-model-me": "^0.1.1", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "child-process-promise": "^2.2.1", + "eslint": "^9.38.0", + "eslint-config-google": "^0.14.0", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-jsdoc": "^61.1.9", + "eslint-plugin-prettier": "^4.2.1", + "firebase-admin": "^13.0.0", + "genkit": "^1.0.0-rc.4", + "jsdom": "^16.2.1", + "jsonwebtoken": "^9.0.0", + "jwk-to-pem": "^2.0.5", + "mocha": "^10.2.0", + "mock-require": "^3.0.3", + "mz": "^2.7.0", + "nock": "^13.2.9", + "node-fetch": "^2.6.7", + "portfinder": "^1.0.28", + "prettier": "^2.8.8", + "protobufjs-cli": "^1.1.1", + "semver": "^7.3.5", + "sinon": "^9.2.4", + "ts-node": "^10.4.0", + "tsdown": "^0.15.11", + "typescript": "^5.9.3", + "yaml": "^2.8.1", + "yargs": "^15.3.1" }, - "typings": "lib/index.d.ts" + "peerDependencies": { + "firebase-admin": "^11.10.0 || ^12.0.0 || ^13.0.0" + }, + "engines": { + "node": ">=18.0.0" + } } diff --git a/protos/README.md b/protos/README.md new file mode 100644 index 000000000..7ec1cd66d --- /dev/null +++ b/protos/README.md @@ -0,0 +1,15 @@ +# Generate compiled ProtoBuf + +Running the script will generate statically-compiled protobufs for decoding `application/protobuf` events. Generate them by running: + +``` +./update.sh +``` + +In order to build, the following repos are cloned + +- https://github.com/googleapis/google-cloudevents +- https://github.com/googleapis/googleapis +- https://github.com/google/protobuf + +The script relies on the [protobufjs-cli](https://github.com/protobufjs/protobuf.js/tree/master/cli#pbts-for-typescript) package to create the compiled js/ts files. diff --git a/protos/compiledFirestore.d.ts b/protos/compiledFirestore.d.ts new file mode 100644 index 000000000..f7ad097a1 --- /dev/null +++ b/protos/compiledFirestore.d.ts @@ -0,0 +1,1342 @@ +import * as $protobuf from "protobufjs"; +import Long = require("long"); +/** Namespace google. */ +export namespace google { + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a Struct. */ + interface IStruct { + + /** Struct fields */ + fields?: ({ [k: string]: google.protobuf.IValue }|null); + } + + /** Represents a Struct. */ + class Struct implements IStruct { + + /** + * Constructs a new Struct. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IStruct); + + /** Struct fields. */ + public fields: { [k: string]: google.protobuf.IValue }; + + /** + * Creates a new Struct instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Struct instance + */ + public static create(properties?: google.protobuf.IStruct): google.protobuf.Struct; + + /** + * Encodes the specified Struct message. Does not implicitly {@link google.protobuf.Struct.verify|verify} messages. + * @param message Struct message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IStruct, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Struct message, length delimited. Does not implicitly {@link google.protobuf.Struct.verify|verify} messages. + * @param message Struct message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IStruct, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Struct message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Struct + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Struct; + + /** + * Decodes a Struct message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Struct + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Struct; + + /** + * Verifies a Struct message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Struct message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Struct + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Struct; + + /** + * Creates a plain object from a Struct message. Also converts values to other types if specified. + * @param message Struct + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Struct, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Struct to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Struct + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Value. */ + interface IValue { + + /** Value nullValue */ + nullValue?: (google.protobuf.NullValue|null); + + /** Value numberValue */ + numberValue?: (number|null); + + /** Value stringValue */ + stringValue?: (string|null); + + /** Value boolValue */ + boolValue?: (boolean|null); + + /** Value structValue */ + structValue?: (google.protobuf.IStruct|null); + + /** Value listValue */ + listValue?: (google.protobuf.IListValue|null); + } + + /** Represents a Value. */ + class Value implements IValue { + + /** + * Constructs a new Value. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IValue); + + /** Value nullValue. */ + public nullValue?: (google.protobuf.NullValue|null); + + /** Value numberValue. */ + public numberValue?: (number|null); + + /** Value stringValue. */ + public stringValue?: (string|null); + + /** Value boolValue. */ + public boolValue?: (boolean|null); + + /** Value structValue. */ + public structValue?: (google.protobuf.IStruct|null); + + /** Value listValue. */ + public listValue?: (google.protobuf.IListValue|null); + + /** Value kind. */ + public kind?: ("nullValue"|"numberValue"|"stringValue"|"boolValue"|"structValue"|"listValue"); + + /** + * Creates a new Value instance using the specified properties. + * @param [properties] Properties to set + * @returns Value instance + */ + public static create(properties?: google.protobuf.IValue): google.protobuf.Value; + + /** + * Encodes the specified Value message. Does not implicitly {@link google.protobuf.Value.verify|verify} messages. + * @param message Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Value message, length delimited. Does not implicitly {@link google.protobuf.Value.verify|verify} messages. + * @param message Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Value message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Value; + + /** + * Decodes a Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Value; + + /** + * Verifies a Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Value + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Value; + + /** + * Creates a plain object from a Value message. Also converts values to other types if specified. + * @param message Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Value to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** NullValue enum. */ + enum NullValue { + NULL_VALUE = 0 + } + + /** Properties of a ListValue. */ + interface IListValue { + + /** ListValue values */ + values?: (google.protobuf.IValue[]|null); + } + + /** Represents a ListValue. */ + class ListValue implements IListValue { + + /** + * Constructs a new ListValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IListValue); + + /** ListValue values. */ + public values: google.protobuf.IValue[]; + + /** + * Creates a new ListValue instance using the specified properties. + * @param [properties] Properties to set + * @returns ListValue instance + */ + public static create(properties?: google.protobuf.IListValue): google.protobuf.ListValue; + + /** + * Encodes the specified ListValue message. Does not implicitly {@link google.protobuf.ListValue.verify|verify} messages. + * @param message ListValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IListValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListValue message, length delimited. Does not implicitly {@link google.protobuf.ListValue.verify|verify} messages. + * @param message ListValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IListValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ListValue; + + /** + * Decodes a ListValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ListValue; + + /** + * Verifies a ListValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListValue + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ListValue; + + /** + * Creates a plain object from a ListValue message. Also converts values to other types if specified. + * @param message ListValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ListValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListValue to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ListValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Timestamp. */ + interface ITimestamp { + + /** Timestamp seconds */ + seconds?: (number|Long|null); + + /** Timestamp nanos */ + nanos?: (number|null); + } + + /** Represents a Timestamp. */ + class Timestamp implements ITimestamp { + + /** + * Constructs a new Timestamp. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ITimestamp); + + /** Timestamp seconds. */ + public seconds: (number|Long); + + /** Timestamp nanos. */ + public nanos: number; + + /** + * Creates a new Timestamp instance using the specified properties. + * @param [properties] Properties to set + * @returns Timestamp instance + */ + public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; + + /** + * Verifies a Timestamp message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Timestamp + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. 
+ * @param message Timestamp + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Timestamp to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Timestamp + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an Any. */ + interface IAny { + + /** Any typeUrl */ + typeUrl?: (string|null); + + /** Any value */ + value?: (Uint8Array|null); + } + + /** Represents an Any. */ + class Any implements IAny { + + /** + * Constructs a new Any. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IAny); + + /** Any typeUrl. */ + public typeUrl: string; + + /** Any value. */ + public value: Uint8Array; + + /** + * Creates a new Any instance using the specified properties. + * @param [properties] Properties to set + * @returns Any instance + */ + public static create(properties?: google.protobuf.IAny): google.protobuf.Any; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Any message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Any; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Any; + + /** + * Verifies an Any message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Any + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Any; + + /** + * Creates a plain object from an Any message. Also converts values to other types if specified. 
+ * @param message Any + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Any, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Any to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Any + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Namespace events. */ + namespace events { + + /** Namespace cloud. */ + namespace cloud { + + /** Namespace firestore. */ + namespace firestore { + + /** Namespace v1. */ + namespace v1 { + + /** Properties of a DocumentEventData. */ + interface IDocumentEventData { + + /** DocumentEventData value */ + value?: (google.events.cloud.firestore.v1.IDocument|null); + + /** DocumentEventData oldValue */ + oldValue?: (google.events.cloud.firestore.v1.IDocument|null); + + /** DocumentEventData updateMask */ + updateMask?: (google.events.cloud.firestore.v1.IDocumentMask|null); + } + + /** Represents a DocumentEventData. */ + class DocumentEventData implements IDocumentEventData { + + /** + * Constructs a new DocumentEventData. + * @param [properties] Properties to set + */ + constructor(properties?: google.events.cloud.firestore.v1.IDocumentEventData); + + /** DocumentEventData value. */ + public value?: (google.events.cloud.firestore.v1.IDocument|null); + + /** DocumentEventData oldValue. */ + public oldValue?: (google.events.cloud.firestore.v1.IDocument|null); + + /** DocumentEventData updateMask. */ + public updateMask?: (google.events.cloud.firestore.v1.IDocumentMask|null); + + /** + * Creates a new DocumentEventData instance using the specified properties. + * @param [properties] Properties to set + * @returns DocumentEventData instance + */ + public static create(properties?: google.events.cloud.firestore.v1.IDocumentEventData): google.events.cloud.firestore.v1.DocumentEventData; + + /** + * Encodes the specified DocumentEventData message. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentEventData.verify|verify} messages. + * @param message DocumentEventData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.events.cloud.firestore.v1.IDocumentEventData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DocumentEventData message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentEventData.verify|verify} messages. + * @param message DocumentEventData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.events.cloud.firestore.v1.IDocumentEventData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DocumentEventData message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DocumentEventData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.events.cloud.firestore.v1.DocumentEventData; + + /** + * Decodes a DocumentEventData message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DocumentEventData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.events.cloud.firestore.v1.DocumentEventData; + + /** + * Verifies a DocumentEventData message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DocumentEventData message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DocumentEventData + */ + public static fromObject(object: { [k: string]: any }): google.events.cloud.firestore.v1.DocumentEventData; + + /** + * Creates a plain object from a DocumentEventData message. Also converts values to other types if specified. + * @param message DocumentEventData + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.events.cloud.firestore.v1.DocumentEventData, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DocumentEventData to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DocumentEventData + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a DocumentMask. */ + interface IDocumentMask { + + /** DocumentMask fieldPaths */ + fieldPaths?: (string[]|null); + } + + /** Represents a DocumentMask. */ + class DocumentMask implements IDocumentMask { + + /** + * Constructs a new DocumentMask. + * @param [properties] Properties to set + */ + constructor(properties?: google.events.cloud.firestore.v1.IDocumentMask); + + /** DocumentMask fieldPaths. */ + public fieldPaths: string[]; + + /** + * Creates a new DocumentMask instance using the specified properties. + * @param [properties] Properties to set + * @returns DocumentMask instance + */ + public static create(properties?: google.events.cloud.firestore.v1.IDocumentMask): google.events.cloud.firestore.v1.DocumentMask; + + /** + * Encodes the specified DocumentMask message. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentMask.verify|verify} messages. + * @param message DocumentMask message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.events.cloud.firestore.v1.IDocumentMask, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DocumentMask message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentMask.verify|verify} messages. 
+ * @param message DocumentMask message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.events.cloud.firestore.v1.IDocumentMask, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DocumentMask message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DocumentMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.events.cloud.firestore.v1.DocumentMask; + + /** + * Decodes a DocumentMask message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DocumentMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.events.cloud.firestore.v1.DocumentMask; + + /** + * Verifies a DocumentMask message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DocumentMask message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DocumentMask + */ + public static fromObject(object: { [k: string]: any }): google.events.cloud.firestore.v1.DocumentMask; + + /** + * Creates a plain object from a DocumentMask message. Also converts values to other types if specified. + * @param message DocumentMask + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.events.cloud.firestore.v1.DocumentMask, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DocumentMask to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DocumentMask + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Document. */ + interface IDocument { + + /** Document name */ + name?: (string|null); + + /** Document fields */ + fields?: ({ [k: string]: google.events.cloud.firestore.v1.IValue }|null); + + /** Document createTime */ + createTime?: (google.protobuf.ITimestamp|null); + + /** Document updateTime */ + updateTime?: (google.protobuf.ITimestamp|null); + } + + /** Represents a Document. */ + class Document implements IDocument { + + /** + * Constructs a new Document. + * @param [properties] Properties to set + */ + constructor(properties?: google.events.cloud.firestore.v1.IDocument); + + /** Document name. */ + public name: string; + + /** Document fields. */ + public fields: { [k: string]: google.events.cloud.firestore.v1.IValue }; + + /** Document createTime. */ + public createTime?: (google.protobuf.ITimestamp|null); + + /** Document updateTime. */ + public updateTime?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new Document instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Document instance + */ + public static create(properties?: google.events.cloud.firestore.v1.IDocument): google.events.cloud.firestore.v1.Document; + + /** + * Encodes the specified Document message. Does not implicitly {@link google.events.cloud.firestore.v1.Document.verify|verify} messages. + * @param message Document message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.events.cloud.firestore.v1.IDocument, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Document message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.Document.verify|verify} messages. + * @param message Document message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.events.cloud.firestore.v1.IDocument, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Document message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Document + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.events.cloud.firestore.v1.Document; + + /** + * Decodes a Document message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Document + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.events.cloud.firestore.v1.Document; + + /** + * Verifies a Document message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Document message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Document + */ + public static fromObject(object: { [k: string]: any }): google.events.cloud.firestore.v1.Document; + + /** + * Creates a plain object from a Document message. Also converts values to other types if specified. + * @param message Document + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.events.cloud.firestore.v1.Document, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Document to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Document + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Value. 
*/ + interface IValue { + + /** Value nullValue */ + nullValue?: (google.protobuf.NullValue|null); + + /** Value booleanValue */ + booleanValue?: (boolean|null); + + /** Value integerValue */ + integerValue?: (number|Long|null); + + /** Value doubleValue */ + doubleValue?: (number|null); + + /** Value timestampValue */ + timestampValue?: (google.protobuf.ITimestamp|null); + + /** Value stringValue */ + stringValue?: (string|null); + + /** Value bytesValue */ + bytesValue?: (Uint8Array|null); + + /** Value referenceValue */ + referenceValue?: (string|null); + + /** Value geoPointValue */ + geoPointValue?: (google.type.ILatLng|null); + + /** Value arrayValue */ + arrayValue?: (google.events.cloud.firestore.v1.IArrayValue|null); + + /** Value mapValue */ + mapValue?: (google.events.cloud.firestore.v1.IMapValue|null); + } + + /** Represents a Value. */ + class Value implements IValue { + + /** + * Constructs a new Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.events.cloud.firestore.v1.IValue); + + /** Value nullValue. */ + public nullValue?: (google.protobuf.NullValue|null); + + /** Value booleanValue. */ + public booleanValue?: (boolean|null); + + /** Value integerValue. */ + public integerValue?: (number|Long|null); + + /** Value doubleValue. */ + public doubleValue?: (number|null); + + /** Value timestampValue. */ + public timestampValue?: (google.protobuf.ITimestamp|null); + + /** Value stringValue. */ + public stringValue?: (string|null); + + /** Value bytesValue. */ + public bytesValue?: (Uint8Array|null); + + /** Value referenceValue. */ + public referenceValue?: (string|null); + + /** Value geoPointValue. */ + public geoPointValue?: (google.type.ILatLng|null); + + /** Value arrayValue. */ + public arrayValue?: (google.events.cloud.firestore.v1.IArrayValue|null); + + /** Value mapValue. */ + public mapValue?: (google.events.cloud.firestore.v1.IMapValue|null); + + /** Value valueType. */ + public valueType?: ("nullValue"|"booleanValue"|"integerValue"|"doubleValue"|"timestampValue"|"stringValue"|"bytesValue"|"referenceValue"|"geoPointValue"|"arrayValue"|"mapValue"); + + /** + * Creates a new Value instance using the specified properties. + * @param [properties] Properties to set + * @returns Value instance + */ + public static create(properties?: google.events.cloud.firestore.v1.IValue): google.events.cloud.firestore.v1.Value; + + /** + * Encodes the specified Value message. Does not implicitly {@link google.events.cloud.firestore.v1.Value.verify|verify} messages. + * @param message Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.events.cloud.firestore.v1.IValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Value message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.Value.verify|verify} messages. + * @param message Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.events.cloud.firestore.v1.IValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Value message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.events.cloud.firestore.v1.Value; + + /** + * Decodes a Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.events.cloud.firestore.v1.Value; + + /** + * Verifies a Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Value + */ + public static fromObject(object: { [k: string]: any }): google.events.cloud.firestore.v1.Value; + + /** + * Creates a plain object from a Value message. Also converts values to other types if specified. + * @param message Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.events.cloud.firestore.v1.Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an ArrayValue. */ + interface IArrayValue { + + /** ArrayValue values */ + values?: (google.events.cloud.firestore.v1.IValue[]|null); + } + + /** Represents an ArrayValue. */ + class ArrayValue implements IArrayValue { + + /** + * Constructs a new ArrayValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.events.cloud.firestore.v1.IArrayValue); + + /** ArrayValue values. */ + public values: google.events.cloud.firestore.v1.IValue[]; + + /** + * Creates a new ArrayValue instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrayValue instance + */ + public static create(properties?: google.events.cloud.firestore.v1.IArrayValue): google.events.cloud.firestore.v1.ArrayValue; + + /** + * Encodes the specified ArrayValue message. Does not implicitly {@link google.events.cloud.firestore.v1.ArrayValue.verify|verify} messages. + * @param message ArrayValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.events.cloud.firestore.v1.IArrayValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrayValue message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.ArrayValue.verify|verify} messages. 
+ * @param message ArrayValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.events.cloud.firestore.v1.IArrayValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrayValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrayValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.events.cloud.firestore.v1.ArrayValue; + + /** + * Decodes an ArrayValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrayValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.events.cloud.firestore.v1.ArrayValue; + + /** + * Verifies an ArrayValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrayValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrayValue + */ + public static fromObject(object: { [k: string]: any }): google.events.cloud.firestore.v1.ArrayValue; + + /** + * Creates a plain object from an ArrayValue message. Also converts values to other types if specified. + * @param message ArrayValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.events.cloud.firestore.v1.ArrayValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrayValue to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrayValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MapValue. */ + interface IMapValue { + + /** MapValue fields */ + fields?: ({ [k: string]: google.events.cloud.firestore.v1.IValue }|null); + } + + /** Represents a MapValue. */ + class MapValue implements IMapValue { + + /** + * Constructs a new MapValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.events.cloud.firestore.v1.IMapValue); + + /** MapValue fields. */ + public fields: { [k: string]: google.events.cloud.firestore.v1.IValue }; + + /** + * Creates a new MapValue instance using the specified properties. + * @param [properties] Properties to set + * @returns MapValue instance + */ + public static create(properties?: google.events.cloud.firestore.v1.IMapValue): google.events.cloud.firestore.v1.MapValue; + + /** + * Encodes the specified MapValue message. Does not implicitly {@link google.events.cloud.firestore.v1.MapValue.verify|verify} messages. 
+ * @param message MapValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.events.cloud.firestore.v1.IMapValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MapValue message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.MapValue.verify|verify} messages. + * @param message MapValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.events.cloud.firestore.v1.IMapValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MapValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MapValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.events.cloud.firestore.v1.MapValue; + + /** + * Decodes a MapValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MapValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.events.cloud.firestore.v1.MapValue; + + /** + * Verifies a MapValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MapValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MapValue + */ + public static fromObject(object: { [k: string]: any }): google.events.cloud.firestore.v1.MapValue; + + /** + * Creates a plain object from a MapValue message. Also converts values to other types if specified. + * @param message MapValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.events.cloud.firestore.v1.MapValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MapValue to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MapValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + } + } + } + + /** Namespace type. */ + namespace type { + + /** Properties of a LatLng. */ + interface ILatLng { + + /** LatLng latitude */ + latitude?: (number|null); + + /** LatLng longitude */ + longitude?: (number|null); + } + + /** Represents a LatLng. */ + class LatLng implements ILatLng { + + /** + * Constructs a new LatLng. + * @param [properties] Properties to set + */ + constructor(properties?: google.type.ILatLng); + + /** LatLng latitude. */ + public latitude: number; + + /** LatLng longitude. */ + public longitude: number; + + /** + * Creates a new LatLng instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns LatLng instance + */ + public static create(properties?: google.type.ILatLng): google.type.LatLng; + + /** + * Encodes the specified LatLng message. Does not implicitly {@link google.type.LatLng.verify|verify} messages. + * @param message LatLng message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.type.ILatLng, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified LatLng message, length delimited. Does not implicitly {@link google.type.LatLng.verify|verify} messages. + * @param message LatLng message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.type.ILatLng, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a LatLng message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns LatLng + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.type.LatLng; + + /** + * Decodes a LatLng message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns LatLng + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.type.LatLng; + + /** + * Verifies a LatLng message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a LatLng message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns LatLng + */ + public static fromObject(object: { [k: string]: any }): google.type.LatLng; + + /** + * Creates a plain object from a LatLng message. Also converts values to other types if specified. + * @param message LatLng + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.type.LatLng, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this LatLng to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for LatLng + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } +} diff --git a/protos/compiledFirestore.js b/protos/compiledFirestore.js new file mode 100644 index 000000000..32a90fc09 --- /dev/null +++ b/protos/compiledFirestore.js @@ -0,0 +1,3514 @@ +/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/ +"use strict"; + +var $protobuf = require("protobufjs/minimal"); + +// Common aliases +var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + +// Exported root namespace +var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + +$root.google = (function() { + + /** + * Namespace google. 
+ * @exports google + * @namespace + */ + var google = {}; + + google.protobuf = (function() { + + /** + * Namespace protobuf. + * @memberof google + * @namespace + */ + var protobuf = {}; + + protobuf.Struct = (function() { + + /** + * Properties of a Struct. + * @memberof google.protobuf + * @interface IStruct + * @property {Object.|null} [fields] Struct fields + */ + + /** + * Constructs a new Struct. + * @memberof google.protobuf + * @classdesc Represents a Struct. + * @implements IStruct + * @constructor + * @param {google.protobuf.IStruct=} [properties] Properties to set + */ + function Struct(properties) { + this.fields = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Struct fields. + * @member {Object.} fields + * @memberof google.protobuf.Struct + * @instance + */ + Struct.prototype.fields = $util.emptyObject; + + /** + * Creates a new Struct instance using the specified properties. + * @function create + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.IStruct=} [properties] Properties to set + * @returns {google.protobuf.Struct} Struct instance + */ + Struct.create = function create(properties) { + return new Struct(properties); + }; + + /** + * Encodes the specified Struct message. Does not implicitly {@link google.protobuf.Struct.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.IStruct} message Struct message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Struct.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) + for (var keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { + writer.uint32(/* id 1, wireType 2 =*/10).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); + $root.google.protobuf.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); + } + return writer; + }; + + /** + * Encodes the specified Struct message, length delimited. Does not implicitly {@link google.protobuf.Struct.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.IStruct} message Struct message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Struct.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Struct message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Struct + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Struct} Struct + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Struct.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Struct(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (message.fields === $util.emptyObject) + message.fields = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = null; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = $root.google.protobuf.Value.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.fields[key] = value; + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Struct message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Struct + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Struct} Struct + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Struct.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Struct message. + * @function verify + * @memberof google.protobuf.Struct + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Struct.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!$util.isObject(message.fields)) + return "fields: object expected"; + var key = Object.keys(message.fields); + for (var i = 0; i < key.length; ++i) { + var error = $root.google.protobuf.Value.verify(message.fields[key[i]]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a Struct message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Struct + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Struct} Struct + */ + Struct.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Struct) + return object; + var message = new $root.google.protobuf.Struct(); + if (object.fields) { + if (typeof object.fields !== "object") + throw TypeError(".google.protobuf.Struct.fields: object expected"); + message.fields = {}; + for (var keys = Object.keys(object.fields), i = 0; i < keys.length; ++i) { + if (typeof object.fields[keys[i]] !== "object") + throw TypeError(".google.protobuf.Struct.fields: object expected"); + message.fields[keys[i]] = $root.google.protobuf.Value.fromObject(object.fields[keys[i]]); + } + } + return message; + }; + + /** + * Creates a plain object from a Struct message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.Struct} message Struct + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Struct.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.objects || options.defaults) + object.fields = {}; + var keys2; + if (message.fields && (keys2 = Object.keys(message.fields)).length) { + object.fields = {}; + for (var j = 0; j < keys2.length; ++j) + object.fields[keys2[j]] = $root.google.protobuf.Value.toObject(message.fields[keys2[j]], options); + } + return object; + }; + + /** + * Converts this Struct to JSON. + * @function toJSON + * @memberof google.protobuf.Struct + * @instance + * @returns {Object.} JSON object + */ + Struct.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Struct + * @function getTypeUrl + * @memberof google.protobuf.Struct + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Struct.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Struct"; + }; + + return Struct; + })(); + + protobuf.Value = (function() { + + /** + * Properties of a Value. + * @memberof google.protobuf + * @interface IValue + * @property {google.protobuf.NullValue|null} [nullValue] Value nullValue + * @property {number|null} [numberValue] Value numberValue + * @property {string|null} [stringValue] Value stringValue + * @property {boolean|null} [boolValue] Value boolValue + * @property {google.protobuf.IStruct|null} [structValue] Value structValue + * @property {google.protobuf.IListValue|null} [listValue] Value listValue + */ + + /** + * Constructs a new Value. + * @memberof google.protobuf + * @classdesc Represents a Value. + * @implements IValue + * @constructor + * @param {google.protobuf.IValue=} [properties] Properties to set + */ + function Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Value nullValue. + * @member {google.protobuf.NullValue|null|undefined} nullValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.nullValue = null; + + /** + * Value numberValue. + * @member {number|null|undefined} numberValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.numberValue = null; + + /** + * Value stringValue. + * @member {string|null|undefined} stringValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.stringValue = null; + + /** + * Value boolValue. + * @member {boolean|null|undefined} boolValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.boolValue = null; + + /** + * Value structValue. + * @member {google.protobuf.IStruct|null|undefined} structValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.structValue = null; + + /** + * Value listValue. 
+ * @member {google.protobuf.IListValue|null|undefined} listValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.listValue = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * Value kind. + * @member {"nullValue"|"numberValue"|"stringValue"|"boolValue"|"structValue"|"listValue"|undefined} kind + * @memberof google.protobuf.Value + * @instance + */ + Object.defineProperty(Value.prototype, "kind", { + get: $util.oneOfGetter($oneOfFields = ["nullValue", "numberValue", "stringValue", "boolValue", "structValue", "listValue"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new Value instance using the specified properties. + * @function create + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.IValue=} [properties] Properties to set + * @returns {google.protobuf.Value} Value instance + */ + Value.create = function create(properties) { + return new Value(properties); + }; + + /** + * Encodes the specified Value message. Does not implicitly {@link google.protobuf.Value.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.nullValue != null && Object.hasOwnProperty.call(message, "nullValue")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.nullValue); + if (message.numberValue != null && Object.hasOwnProperty.call(message, "numberValue")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.numberValue); + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.stringValue); + if (message.boolValue != null && Object.hasOwnProperty.call(message, "boolValue")) + writer.uint32(/* id 4, wireType 0 =*/32).bool(message.boolValue); + if (message.structValue != null && Object.hasOwnProperty.call(message, "structValue")) + $root.google.protobuf.Struct.encode(message.structValue, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.listValue != null && Object.hasOwnProperty.call(message, "listValue")) + $root.google.protobuf.ListValue.encode(message.listValue, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Value message, length delimited. Does not implicitly {@link google.protobuf.Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Value message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.nullValue = reader.int32(); + break; + } + case 2: { + message.numberValue = reader.double(); + break; + } + case 3: { + message.stringValue = reader.string(); + break; + } + case 4: { + message.boolValue = reader.bool(); + break; + } + case 5: { + message.structValue = $root.google.protobuf.Struct.decode(reader, reader.uint32()); + break; + } + case 6: { + message.listValue = $root.google.protobuf.ListValue.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Value message. + * @function verify + * @memberof google.protobuf.Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + properties.kind = 1; + switch (message.nullValue) { + default: + return "nullValue: enum value expected"; + case 0: + break; + } + } + if (message.numberValue != null && message.hasOwnProperty("numberValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + if (typeof message.numberValue !== "number") + return "numberValue: number expected"; + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + if (!$util.isString(message.stringValue)) + return "stringValue: string expected"; + } + if (message.boolValue != null && message.hasOwnProperty("boolValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + if (typeof message.boolValue !== "boolean") + return "boolValue: boolean expected"; + } + if (message.structValue != null && message.hasOwnProperty("structValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + { + var error = $root.google.protobuf.Struct.verify(message.structValue); + if (error) + return "structValue." 
+ error; + } + } + if (message.listValue != null && message.hasOwnProperty("listValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + { + var error = $root.google.protobuf.ListValue.verify(message.listValue); + if (error) + return "listValue." + error; + } + } + return null; + }; + + /** + * Creates a Value message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Value} Value + */ + Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Value) + return object; + var message = new $root.google.protobuf.Value(); + switch (object.nullValue) { + default: + if (typeof object.nullValue === "number") { + message.nullValue = object.nullValue; + break; + } + break; + case "NULL_VALUE": + case 0: + message.nullValue = 0; + break; + } + if (object.numberValue != null) + message.numberValue = Number(object.numberValue); + if (object.stringValue != null) + message.stringValue = String(object.stringValue); + if (object.boolValue != null) + message.boolValue = Boolean(object.boolValue); + if (object.structValue != null) { + if (typeof object.structValue !== "object") + throw TypeError(".google.protobuf.Value.structValue: object expected"); + message.structValue = $root.google.protobuf.Struct.fromObject(object.structValue); + } + if (object.listValue != null) { + if (typeof object.listValue !== "object") + throw TypeError(".google.protobuf.Value.listValue: object expected"); + message.listValue = $root.google.protobuf.ListValue.fromObject(object.listValue); + } + return message; + }; + + /** + * Creates a plain object from a Value message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.Value} message Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + object.nullValue = options.enums === String ? $root.google.protobuf.NullValue[message.nullValue] === undefined ? message.nullValue : $root.google.protobuf.NullValue[message.nullValue] : message.nullValue; + if (options.oneofs) + object.kind = "nullValue"; + } + if (message.numberValue != null && message.hasOwnProperty("numberValue")) { + object.numberValue = options.json && !isFinite(message.numberValue) ? 
String(message.numberValue) : message.numberValue; + if (options.oneofs) + object.kind = "numberValue"; + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + object.stringValue = message.stringValue; + if (options.oneofs) + object.kind = "stringValue"; + } + if (message.boolValue != null && message.hasOwnProperty("boolValue")) { + object.boolValue = message.boolValue; + if (options.oneofs) + object.kind = "boolValue"; + } + if (message.structValue != null && message.hasOwnProperty("structValue")) { + object.structValue = $root.google.protobuf.Struct.toObject(message.structValue, options); + if (options.oneofs) + object.kind = "structValue"; + } + if (message.listValue != null && message.hasOwnProperty("listValue")) { + object.listValue = $root.google.protobuf.ListValue.toObject(message.listValue, options); + if (options.oneofs) + object.kind = "listValue"; + } + return object; + }; + + /** + * Converts this Value to JSON. + * @function toJSON + * @memberof google.protobuf.Value + * @instance + * @returns {Object.} JSON object + */ + Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Value + * @function getTypeUrl + * @memberof google.protobuf.Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Value"; + }; + + return Value; + })(); + + /** + * NullValue enum. + * @name google.protobuf.NullValue + * @enum {number} + * @property {number} NULL_VALUE=0 NULL_VALUE value + */ + protobuf.NullValue = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "NULL_VALUE"] = 0; + return values; + })(); + + protobuf.ListValue = (function() { + + /** + * Properties of a ListValue. + * @memberof google.protobuf + * @interface IListValue + * @property {Array.|null} [values] ListValue values + */ + + /** + * Constructs a new ListValue. + * @memberof google.protobuf + * @classdesc Represents a ListValue. + * @implements IListValue + * @constructor + * @param {google.protobuf.IListValue=} [properties] Properties to set + */ + function ListValue(properties) { + this.values = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ListValue values. + * @member {Array.} values + * @memberof google.protobuf.ListValue + * @instance + */ + ListValue.prototype.values = $util.emptyArray; + + /** + * Creates a new ListValue instance using the specified properties. + * @function create + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.IListValue=} [properties] Properties to set + * @returns {google.protobuf.ListValue} ListValue instance + */ + ListValue.create = function create(properties) { + return new ListValue(properties); + }; + + /** + * Encodes the specified ListValue message. Does not implicitly {@link google.protobuf.ListValue.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.IListValue} message ListValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (var i = 0; i < message.values.length; ++i) + $root.google.protobuf.Value.encode(message.values[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ListValue message, length delimited. Does not implicitly {@link google.protobuf.ListValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.IListValue} message ListValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ListValue message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.ListValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ListValue} ListValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ListValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push($root.google.protobuf.Value.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ListValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.ListValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ListValue} ListValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ListValue message. + * @function verify + * @memberof google.protobuf.ListValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ListValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (var i = 0; i < message.values.length; ++i) { + var error = $root.google.protobuf.Value.verify(message.values[i]); + if (error) + return "values." 
+ error; + } + } + return null; + }; + + /** + * Creates a ListValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.ListValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ListValue} ListValue + */ + ListValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ListValue) + return object; + var message = new $root.google.protobuf.ListValue(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.protobuf.ListValue.values: array expected"); + message.values = []; + for (var i = 0; i < object.values.length; ++i) { + if (typeof object.values[i] !== "object") + throw TypeError(".google.protobuf.ListValue.values: object expected"); + message.values[i] = $root.google.protobuf.Value.fromObject(object.values[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a ListValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.ListValue} message ListValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ListValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.values = []; + if (message.values && message.values.length) { + object.values = []; + for (var j = 0; j < message.values.length; ++j) + object.values[j] = $root.google.protobuf.Value.toObject(message.values[j], options); + } + return object; + }; + + /** + * Converts this ListValue to JSON. + * @function toJSON + * @memberof google.protobuf.ListValue + * @instance + * @returns {Object.} JSON object + */ + ListValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ListValue + * @function getTypeUrl + * @memberof google.protobuf.ListValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ListValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ListValue"; + }; + + return ListValue; + })(); + + protobuf.Timestamp = (function() { + + /** + * Properties of a Timestamp. + * @memberof google.protobuf + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos + */ + + /** + * Constructs a new Timestamp. + * @memberof google.protobuf + * @classdesc Represents a Timestamp. + * @implements ITimestamp + * @constructor + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + */ + function Timestamp(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Timestamp nanos. 
+ * @member {number} nanos + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.nanos = 0; + + /** + * Creates a new Timestamp instance using the specified properties. + * @function create + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance + */ + Timestamp.create = function create(properties) { + return new Timestamp(properties); + }; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + return writer; + }; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.seconds = reader.int64(); + break; + } + case 2: { + message.nanos = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Timestamp message. + * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp + */ + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + var message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? "0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? 
new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; + + /** + * Converts this Timestamp to JSON. + * @function toJSON + * @memberof google.protobuf.Timestamp + * @instance + * @returns {Object.} JSON object + */ + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Timestamp + * @function getTypeUrl + * @memberof google.protobuf.Timestamp + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Timestamp"; + }; + + return Timestamp; + })(); + + protobuf.Any = (function() { + + /** + * Properties of an Any. + * @memberof google.protobuf + * @interface IAny + * @property {string|null} [typeUrl] Any typeUrl + * @property {Uint8Array|null} [value] Any value + */ + + /** + * Constructs a new Any. + * @memberof google.protobuf + * @classdesc Represents an Any. + * @implements IAny + * @constructor + * @param {google.protobuf.IAny=} [properties] Properties to set + */ + function Any(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Any typeUrl. + * @member {string} typeUrl + * @memberof google.protobuf.Any + * @instance + */ + Any.prototype.typeUrl = ""; + + /** + * Any value. + * @member {Uint8Array} value + * @memberof google.protobuf.Any + * @instance + */ + Any.prototype.value = $util.newBuffer([]); + + /** + * Creates a new Any instance using the specified properties. + * @function create + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny=} [properties] Properties to set + * @returns {google.protobuf.Any} Any instance + */ + Any.create = function create(properties) { + return new Any(properties); + }; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny} message Any message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Any.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.typeUrl != null && Object.hasOwnProperty.call(message, "typeUrl")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.typeUrl); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.value); + return writer; + }; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny} message Any message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Any.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Any message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Any + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Any} Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Any.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Any(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.typeUrl = reader.string(); + break; + } + case 2: { + message.value = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Any + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Any} Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Any.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Any message. + * @function verify + * @memberof google.protobuf.Any + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Any.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.typeUrl != null && message.hasOwnProperty("typeUrl")) + if (!$util.isString(message.typeUrl)) + return "typeUrl: string expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) + return "value: buffer expected"; + return null; + }; + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Any + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Any} Any + */ + Any.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Any) + return object; + var message = new $root.google.protobuf.Any(); + if (object.typeUrl != null) + message.typeUrl = String(object.typeUrl); + if (object.value != null) + if (typeof object.value === "string") + $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); + else if (object.value.length >= 0) + message.value = object.value; + return message; + }; + + /** + * Creates a plain object from an Any message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.Any} message Any + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Any.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.typeUrl = ""; + if (options.bytes === String) + object.value = ""; + else { + object.value = []; + if (options.bytes !== Array) + object.value = $util.newBuffer(object.value); + } + } + if (message.typeUrl != null && message.hasOwnProperty("typeUrl")) + object.typeUrl = message.typeUrl; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; + return object; + }; + + /** + * Converts this Any to JSON. + * @function toJSON + * @memberof google.protobuf.Any + * @instance + * @returns {Object.} JSON object + */ + Any.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Any + * @function getTypeUrl + * @memberof google.protobuf.Any + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Any.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Any"; + }; + + return Any; + })(); + + return protobuf; + })(); + + google.events = (function() { + + /** + * Namespace events. + * @memberof google + * @namespace + */ + var events = {}; + + events.cloud = (function() { + + /** + * Namespace cloud. + * @memberof google.events + * @namespace + */ + var cloud = {}; + + cloud.firestore = (function() { + + /** + * Namespace firestore. + * @memberof google.events.cloud + * @namespace + */ + var firestore = {}; + + firestore.v1 = (function() { + + /** + * Namespace v1. + * @memberof google.events.cloud.firestore + * @namespace + */ + var v1 = {}; + + v1.DocumentEventData = (function() { + + /** + * Properties of a DocumentEventData. + * @memberof google.events.cloud.firestore.v1 + * @interface IDocumentEventData + * @property {google.events.cloud.firestore.v1.IDocument|null} [value] DocumentEventData value + * @property {google.events.cloud.firestore.v1.IDocument|null} [oldValue] DocumentEventData oldValue + * @property {google.events.cloud.firestore.v1.IDocumentMask|null} [updateMask] DocumentEventData updateMask + */ + + /** + * Constructs a new DocumentEventData. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a DocumentEventData. + * @implements IDocumentEventData + * @constructor + * @param {google.events.cloud.firestore.v1.IDocumentEventData=} [properties] Properties to set + */ + function DocumentEventData(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DocumentEventData value. 
+ * @member {google.events.cloud.firestore.v1.IDocument|null|undefined} value + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + */ + DocumentEventData.prototype.value = null; + + /** + * DocumentEventData oldValue. + * @member {google.events.cloud.firestore.v1.IDocument|null|undefined} oldValue + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + */ + DocumentEventData.prototype.oldValue = null; + + /** + * DocumentEventData updateMask. + * @member {google.events.cloud.firestore.v1.IDocumentMask|null|undefined} updateMask + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + */ + DocumentEventData.prototype.updateMask = null; + + /** + * Creates a new DocumentEventData instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.IDocumentEventData=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData instance + */ + DocumentEventData.create = function create(properties) { + return new DocumentEventData(properties); + }; + + /** + * Encodes the specified DocumentEventData message. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentEventData.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.IDocumentEventData} message DocumentEventData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentEventData.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + $root.google.events.cloud.firestore.v1.Document.encode(message.value, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.oldValue != null && Object.hasOwnProperty.call(message, "oldValue")) + $root.google.events.cloud.firestore.v1.Document.encode(message.oldValue, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.updateMask != null && Object.hasOwnProperty.call(message, "updateMask")) + $root.google.events.cloud.firestore.v1.DocumentMask.encode(message.updateMask, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified DocumentEventData message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentEventData.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.IDocumentEventData} message DocumentEventData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentEventData.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DocumentEventData message from the specified reader or buffer. 
+ * @function decode + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentEventData.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.DocumentEventData(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = $root.google.events.cloud.firestore.v1.Document.decode(reader, reader.uint32()); + break; + } + case 2: { + message.oldValue = $root.google.events.cloud.firestore.v1.Document.decode(reader, reader.uint32()); + break; + } + case 3: { + message.updateMask = $root.google.events.cloud.firestore.v1.DocumentMask.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DocumentEventData message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentEventData.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DocumentEventData message. + * @function verify + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DocumentEventData.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) { + var error = $root.google.events.cloud.firestore.v1.Document.verify(message.value); + if (error) + return "value." + error; + } + if (message.oldValue != null && message.hasOwnProperty("oldValue")) { + var error = $root.google.events.cloud.firestore.v1.Document.verify(message.oldValue); + if (error) + return "oldValue." + error; + } + if (message.updateMask != null && message.hasOwnProperty("updateMask")) { + var error = $root.google.events.cloud.firestore.v1.DocumentMask.verify(message.updateMask); + if (error) + return "updateMask." + error; + } + return null; + }; + + /** + * Creates a DocumentEventData message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData + */ + DocumentEventData.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.DocumentEventData) + return object; + var message = new $root.google.events.cloud.firestore.v1.DocumentEventData(); + if (object.value != null) { + if (typeof object.value !== "object") + throw TypeError(".google.events.cloud.firestore.v1.DocumentEventData.value: object expected"); + message.value = $root.google.events.cloud.firestore.v1.Document.fromObject(object.value); + } + if (object.oldValue != null) { + if (typeof object.oldValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.DocumentEventData.oldValue: object expected"); + message.oldValue = $root.google.events.cloud.firestore.v1.Document.fromObject(object.oldValue); + } + if (object.updateMask != null) { + if (typeof object.updateMask !== "object") + throw TypeError(".google.events.cloud.firestore.v1.DocumentEventData.updateMask: object expected"); + message.updateMask = $root.google.events.cloud.firestore.v1.DocumentMask.fromObject(object.updateMask); + } + return message; + }; + + /** + * Creates a plain object from a DocumentEventData message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.DocumentEventData} message DocumentEventData + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DocumentEventData.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.value = null; + object.oldValue = null; + object.updateMask = null; + } + if (message.value != null && message.hasOwnProperty("value")) + object.value = $root.google.events.cloud.firestore.v1.Document.toObject(message.value, options); + if (message.oldValue != null && message.hasOwnProperty("oldValue")) + object.oldValue = $root.google.events.cloud.firestore.v1.Document.toObject(message.oldValue, options); + if (message.updateMask != null && message.hasOwnProperty("updateMask")) + object.updateMask = $root.google.events.cloud.firestore.v1.DocumentMask.toObject(message.updateMask, options); + return object; + }; + + /** + * Converts this DocumentEventData to JSON. + * @function toJSON + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + * @returns {Object.} JSON object + */ + DocumentEventData.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DocumentEventData + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DocumentEventData.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.DocumentEventData"; + }; + + return DocumentEventData; + })(); + + v1.DocumentMask = (function() { + + /** + * Properties of a DocumentMask. 
+ * @memberof google.events.cloud.firestore.v1 + * @interface IDocumentMask + * @property {Array.|null} [fieldPaths] DocumentMask fieldPaths + */ + + /** + * Constructs a new DocumentMask. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a DocumentMask. + * @implements IDocumentMask + * @constructor + * @param {google.events.cloud.firestore.v1.IDocumentMask=} [properties] Properties to set + */ + function DocumentMask(properties) { + this.fieldPaths = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DocumentMask fieldPaths. + * @member {Array.} fieldPaths + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @instance + */ + DocumentMask.prototype.fieldPaths = $util.emptyArray; + + /** + * Creates a new DocumentMask instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.IDocumentMask=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask instance + */ + DocumentMask.create = function create(properties) { + return new DocumentMask(properties); + }; + + /** + * Encodes the specified DocumentMask message. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentMask.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.IDocumentMask} message DocumentMask message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentMask.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fieldPaths != null && message.fieldPaths.length) + for (var i = 0; i < message.fieldPaths.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.fieldPaths[i]); + return writer; + }; + + /** + * Encodes the specified DocumentMask message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentMask.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.IDocumentMask} message DocumentMask message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentMask.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DocumentMask message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentMask.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.DocumentMask(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.fieldPaths && message.fieldPaths.length)) + message.fieldPaths = []; + message.fieldPaths.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DocumentMask message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentMask.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DocumentMask message. + * @function verify + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DocumentMask.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fieldPaths != null && message.hasOwnProperty("fieldPaths")) { + if (!Array.isArray(message.fieldPaths)) + return "fieldPaths: array expected"; + for (var i = 0; i < message.fieldPaths.length; ++i) + if (!$util.isString(message.fieldPaths[i])) + return "fieldPaths: string[] expected"; + } + return null; + }; + + /** + * Creates a DocumentMask message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask + */ + DocumentMask.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.DocumentMask) + return object; + var message = new $root.google.events.cloud.firestore.v1.DocumentMask(); + if (object.fieldPaths) { + if (!Array.isArray(object.fieldPaths)) + throw TypeError(".google.events.cloud.firestore.v1.DocumentMask.fieldPaths: array expected"); + message.fieldPaths = []; + for (var i = 0; i < object.fieldPaths.length; ++i) + message.fieldPaths[i] = String(object.fieldPaths[i]); + } + return message; + }; + + /** + * Creates a plain object from a DocumentMask message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.DocumentMask} message DocumentMask + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DocumentMask.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.fieldPaths = []; + if (message.fieldPaths && message.fieldPaths.length) { + object.fieldPaths = []; + for (var j = 0; j < message.fieldPaths.length; ++j) + object.fieldPaths[j] = message.fieldPaths[j]; + } + return object; + }; + + /** + * Converts this DocumentMask to JSON. + * @function toJSON + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @instance + * @returns {Object.} JSON object + */ + DocumentMask.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DocumentMask + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DocumentMask.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.DocumentMask"; + }; + + return DocumentMask; + })(); + + v1.Document = (function() { + + /** + * Properties of a Document. + * @memberof google.events.cloud.firestore.v1 + * @interface IDocument + * @property {string|null} [name] Document name + * @property {Object.|null} [fields] Document fields + * @property {google.protobuf.ITimestamp|null} [createTime] Document createTime + * @property {google.protobuf.ITimestamp|null} [updateTime] Document updateTime + */ + + /** + * Constructs a new Document. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a Document. + * @implements IDocument + * @constructor + * @param {google.events.cloud.firestore.v1.IDocument=} [properties] Properties to set + */ + function Document(properties) { + this.fields = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Document name. + * @member {string} name + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.name = ""; + + /** + * Document fields. + * @member {Object.} fields + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.fields = $util.emptyObject; + + /** + * Document createTime. + * @member {google.protobuf.ITimestamp|null|undefined} createTime + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.createTime = null; + + /** + * Document updateTime. + * @member {google.protobuf.ITimestamp|null|undefined} updateTime + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.updateTime = null; + + /** + * Creates a new Document instance using the specified properties. 
+ * @function create + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.IDocument=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.Document} Document instance + */ + Document.create = function create(properties) { + return new Document(properties); + }; + + /** + * Encodes the specified Document message. Does not implicitly {@link google.events.cloud.firestore.v1.Document.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.IDocument} message Document message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Document.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) + for (var keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); + $root.google.events.cloud.firestore.v1.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); + } + if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) + $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.updateTime != null && Object.hasOwnProperty.call(message, "updateTime")) + $root.google.protobuf.Timestamp.encode(message.updateTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Document message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.Document.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.IDocument} message Document message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Document.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Document message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.Document} Document + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Document.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.Document(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (message.fields === $util.emptyObject) + message.fields = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = null; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = $root.google.events.cloud.firestore.v1.Value.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.fields[key] = value; + break; + } + case 3: { + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 4: { + message.updateTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Document message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.Document} Document + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Document.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Document message. + * @function verify + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Document.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!$util.isObject(message.fields)) + return "fields: object expected"; + var key = Object.keys(message.fields); + for (var i = 0; i < key.length; ++i) { + var error = $root.google.events.cloud.firestore.v1.Value.verify(message.fields[key[i]]); + if (error) + return "fields." + error; + } + } + if (message.createTime != null && message.hasOwnProperty("createTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.createTime); + if (error) + return "createTime." + error; + } + if (message.updateTime != null && message.hasOwnProperty("updateTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.updateTime); + if (error) + return "updateTime." + error; + } + return null; + }; + + /** + * Creates a Document message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.Document} Document + */ + Document.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.Document) + return object; + var message = new $root.google.events.cloud.firestore.v1.Document(); + if (object.name != null) + message.name = String(object.name); + if (object.fields) { + if (typeof object.fields !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.fields: object expected"); + message.fields = {}; + for (var keys = Object.keys(object.fields), i = 0; i < keys.length; ++i) { + if (typeof object.fields[keys[i]] !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.fields: object expected"); + message.fields[keys[i]] = $root.google.events.cloud.firestore.v1.Value.fromObject(object.fields[keys[i]]); + } + } + if (object.createTime != null) { + if (typeof object.createTime !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.createTime: object expected"); + message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); + } + if (object.updateTime != null) { + if (typeof object.updateTime !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.updateTime: object expected"); + message.updateTime = $root.google.protobuf.Timestamp.fromObject(object.updateTime); + } + return message; + }; + + /** + * Creates a plain object from a Document message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.Document} message Document + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Document.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.objects || options.defaults) + object.fields = {}; + if (options.defaults) { + object.name = ""; + object.createTime = null; + object.updateTime = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + var keys2; + if (message.fields && (keys2 = Object.keys(message.fields)).length) { + object.fields = {}; + for (var j = 0; j < keys2.length; ++j) + object.fields[keys2[j]] = $root.google.events.cloud.firestore.v1.Value.toObject(message.fields[keys2[j]], options); + } + if (message.createTime != null && message.hasOwnProperty("createTime")) + object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); + if (message.updateTime != null && message.hasOwnProperty("updateTime")) + object.updateTime = $root.google.protobuf.Timestamp.toObject(message.updateTime, options); + return object; + }; + + /** + * Converts this Document to JSON. 
+ * @function toJSON + * @memberof google.events.cloud.firestore.v1.Document + * @instance + * @returns {Object.} JSON object + */ + Document.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Document + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Document.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.Document"; + }; + + return Document; + })(); + + v1.Value = (function() { + + /** + * Properties of a Value. + * @memberof google.events.cloud.firestore.v1 + * @interface IValue + * @property {google.protobuf.NullValue|null} [nullValue] Value nullValue + * @property {boolean|null} [booleanValue] Value booleanValue + * @property {number|Long|null} [integerValue] Value integerValue + * @property {number|null} [doubleValue] Value doubleValue + * @property {google.protobuf.ITimestamp|null} [timestampValue] Value timestampValue + * @property {string|null} [stringValue] Value stringValue + * @property {Uint8Array|null} [bytesValue] Value bytesValue + * @property {string|null} [referenceValue] Value referenceValue + * @property {google.type.ILatLng|null} [geoPointValue] Value geoPointValue + * @property {google.events.cloud.firestore.v1.IArrayValue|null} [arrayValue] Value arrayValue + * @property {google.events.cloud.firestore.v1.IMapValue|null} [mapValue] Value mapValue + */ + + /** + * Constructs a new Value. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a Value. + * @implements IValue + * @constructor + * @param {google.events.cloud.firestore.v1.IValue=} [properties] Properties to set + */ + function Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Value nullValue. + * @member {google.protobuf.NullValue|null|undefined} nullValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.nullValue = null; + + /** + * Value booleanValue. + * @member {boolean|null|undefined} booleanValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.booleanValue = null; + + /** + * Value integerValue. + * @member {number|Long|null|undefined} integerValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.integerValue = null; + + /** + * Value doubleValue. + * @member {number|null|undefined} doubleValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.doubleValue = null; + + /** + * Value timestampValue. + * @member {google.protobuf.ITimestamp|null|undefined} timestampValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.timestampValue = null; + + /** + * Value stringValue. + * @member {string|null|undefined} stringValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.stringValue = null; + + /** + * Value bytesValue. 
+ * @member {Uint8Array|null|undefined} bytesValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.bytesValue = null; + + /** + * Value referenceValue. + * @member {string|null|undefined} referenceValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.referenceValue = null; + + /** + * Value geoPointValue. + * @member {google.type.ILatLng|null|undefined} geoPointValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.geoPointValue = null; + + /** + * Value arrayValue. + * @member {google.events.cloud.firestore.v1.IArrayValue|null|undefined} arrayValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.arrayValue = null; + + /** + * Value mapValue. + * @member {google.events.cloud.firestore.v1.IMapValue|null|undefined} mapValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.mapValue = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * Value valueType. + * @member {"nullValue"|"booleanValue"|"integerValue"|"doubleValue"|"timestampValue"|"stringValue"|"bytesValue"|"referenceValue"|"geoPointValue"|"arrayValue"|"mapValue"|undefined} valueType + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Object.defineProperty(Value.prototype, "valueType", { + get: $util.oneOfGetter($oneOfFields = ["nullValue", "booleanValue", "integerValue", "doubleValue", "timestampValue", "stringValue", "bytesValue", "referenceValue", "geoPointValue", "arrayValue", "mapValue"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new Value instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.IValue=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.Value} Value instance + */ + Value.create = function create(properties) { + return new Value(properties); + }; + + /** + * Encodes the specified Value message. Does not implicitly {@link google.events.cloud.firestore.v1.Value.verify|verify} messages. 
+ * @function encode + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.booleanValue != null && Object.hasOwnProperty.call(message, "booleanValue")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.booleanValue); + if (message.integerValue != null && Object.hasOwnProperty.call(message, "integerValue")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.integerValue); + if (message.doubleValue != null && Object.hasOwnProperty.call(message, "doubleValue")) + writer.uint32(/* id 3, wireType 1 =*/25).double(message.doubleValue); + if (message.referenceValue != null && Object.hasOwnProperty.call(message, "referenceValue")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.referenceValue); + if (message.mapValue != null && Object.hasOwnProperty.call(message, "mapValue")) + $root.google.events.cloud.firestore.v1.MapValue.encode(message.mapValue, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.geoPointValue != null && Object.hasOwnProperty.call(message, "geoPointValue")) + $root.google.type.LatLng.encode(message.geoPointValue, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.arrayValue != null && Object.hasOwnProperty.call(message, "arrayValue")) + $root.google.events.cloud.firestore.v1.ArrayValue.encode(message.arrayValue, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.timestampValue != null && Object.hasOwnProperty.call(message, "timestampValue")) + $root.google.protobuf.Timestamp.encode(message.timestampValue, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + if (message.nullValue != null && Object.hasOwnProperty.call(message, "nullValue")) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.nullValue); + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) + writer.uint32(/* id 17, wireType 2 =*/138).string(message.stringValue); + if (message.bytesValue != null && Object.hasOwnProperty.call(message, "bytesValue")) + writer.uint32(/* id 18, wireType 2 =*/146).bytes(message.bytesValue); + return writer; + }; + + /** + * Encodes the specified Value message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Value message from the specified reader or buffer. 
+ * @function decode + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 11: { + message.nullValue = reader.int32(); + break; + } + case 1: { + message.booleanValue = reader.bool(); + break; + } + case 2: { + message.integerValue = reader.int64(); + break; + } + case 3: { + message.doubleValue = reader.double(); + break; + } + case 10: { + message.timestampValue = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 17: { + message.stringValue = reader.string(); + break; + } + case 18: { + message.bytesValue = reader.bytes(); + break; + } + case 5: { + message.referenceValue = reader.string(); + break; + } + case 8: { + message.geoPointValue = $root.google.type.LatLng.decode(reader, reader.uint32()); + break; + } + case 9: { + message.arrayValue = $root.google.events.cloud.firestore.v1.ArrayValue.decode(reader, reader.uint32()); + break; + } + case 6: { + message.mapValue = $root.google.events.cloud.firestore.v1.MapValue.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Value message. 
+ * @function verify + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + properties.valueType = 1; + switch (message.nullValue) { + default: + return "nullValue: enum value expected"; + case 0: + break; + } + } + if (message.booleanValue != null && message.hasOwnProperty("booleanValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (typeof message.booleanValue !== "boolean") + return "booleanValue: boolean expected"; + } + if (message.integerValue != null && message.hasOwnProperty("integerValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!$util.isInteger(message.integerValue) && !(message.integerValue && $util.isInteger(message.integerValue.low) && $util.isInteger(message.integerValue.high))) + return "integerValue: integer|Long expected"; + } + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (typeof message.doubleValue !== "number") + return "doubleValue: number expected"; + } + if (message.timestampValue != null && message.hasOwnProperty("timestampValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + var error = $root.google.protobuf.Timestamp.verify(message.timestampValue); + if (error) + return "timestampValue." + error; + } + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!$util.isString(message.stringValue)) + return "stringValue: string expected"; + } + if (message.bytesValue != null && message.hasOwnProperty("bytesValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!(message.bytesValue && typeof message.bytesValue.length === "number" || $util.isString(message.bytesValue))) + return "bytesValue: buffer expected"; + } + if (message.referenceValue != null && message.hasOwnProperty("referenceValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!$util.isString(message.referenceValue)) + return "referenceValue: string expected"; + } + if (message.geoPointValue != null && message.hasOwnProperty("geoPointValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + var error = $root.google.type.LatLng.verify(message.geoPointValue); + if (error) + return "geoPointValue." + error; + } + } + if (message.arrayValue != null && message.hasOwnProperty("arrayValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + var error = $root.google.events.cloud.firestore.v1.ArrayValue.verify(message.arrayValue); + if (error) + return "arrayValue." 
+ error; + } + } + if (message.mapValue != null && message.hasOwnProperty("mapValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + var error = $root.google.events.cloud.firestore.v1.MapValue.verify(message.mapValue); + if (error) + return "mapValue." + error; + } + } + return null; + }; + + /** + * Creates a Value message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.Value} Value + */ + Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.Value) + return object; + var message = new $root.google.events.cloud.firestore.v1.Value(); + switch (object.nullValue) { + default: + if (typeof object.nullValue === "number") { + message.nullValue = object.nullValue; + break; + } + break; + case "NULL_VALUE": + case 0: + message.nullValue = 0; + break; + } + if (object.booleanValue != null) + message.booleanValue = Boolean(object.booleanValue); + if (object.integerValue != null) + if ($util.Long) + (message.integerValue = $util.Long.fromValue(object.integerValue)).unsigned = false; + else if (typeof object.integerValue === "string") + message.integerValue = parseInt(object.integerValue, 10); + else if (typeof object.integerValue === "number") + message.integerValue = object.integerValue; + else if (typeof object.integerValue === "object") + message.integerValue = new $util.LongBits(object.integerValue.low >>> 0, object.integerValue.high >>> 0).toNumber(); + if (object.doubleValue != null) + message.doubleValue = Number(object.doubleValue); + if (object.timestampValue != null) { + if (typeof object.timestampValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.timestampValue: object expected"); + message.timestampValue = $root.google.protobuf.Timestamp.fromObject(object.timestampValue); + } + if (object.stringValue != null) + message.stringValue = String(object.stringValue); + if (object.bytesValue != null) + if (typeof object.bytesValue === "string") + $util.base64.decode(object.bytesValue, message.bytesValue = $util.newBuffer($util.base64.length(object.bytesValue)), 0); + else if (object.bytesValue.length >= 0) + message.bytesValue = object.bytesValue; + if (object.referenceValue != null) + message.referenceValue = String(object.referenceValue); + if (object.geoPointValue != null) { + if (typeof object.geoPointValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.geoPointValue: object expected"); + message.geoPointValue = $root.google.type.LatLng.fromObject(object.geoPointValue); + } + if (object.arrayValue != null) { + if (typeof object.arrayValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.arrayValue: object expected"); + message.arrayValue = $root.google.events.cloud.firestore.v1.ArrayValue.fromObject(object.arrayValue); + } + if (object.mapValue != null) { + if (typeof object.mapValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.mapValue: object expected"); + message.mapValue = $root.google.events.cloud.firestore.v1.MapValue.fromObject(object.mapValue); + } + return message; + }; + + /** + * Creates a plain object from a Value message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.Value} message Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (message.booleanValue != null && message.hasOwnProperty("booleanValue")) { + object.booleanValue = message.booleanValue; + if (options.oneofs) + object.valueType = "booleanValue"; + } + if (message.integerValue != null && message.hasOwnProperty("integerValue")) { + if (typeof message.integerValue === "number") + object.integerValue = options.longs === String ? String(message.integerValue) : message.integerValue; + else + object.integerValue = options.longs === String ? $util.Long.prototype.toString.call(message.integerValue) : options.longs === Number ? new $util.LongBits(message.integerValue.low >>> 0, message.integerValue.high >>> 0).toNumber() : message.integerValue; + if (options.oneofs) + object.valueType = "integerValue"; + } + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) { + object.doubleValue = options.json && !isFinite(message.doubleValue) ? String(message.doubleValue) : message.doubleValue; + if (options.oneofs) + object.valueType = "doubleValue"; + } + if (message.referenceValue != null && message.hasOwnProperty("referenceValue")) { + object.referenceValue = message.referenceValue; + if (options.oneofs) + object.valueType = "referenceValue"; + } + if (message.mapValue != null && message.hasOwnProperty("mapValue")) { + object.mapValue = $root.google.events.cloud.firestore.v1.MapValue.toObject(message.mapValue, options); + if (options.oneofs) + object.valueType = "mapValue"; + } + if (message.geoPointValue != null && message.hasOwnProperty("geoPointValue")) { + object.geoPointValue = $root.google.type.LatLng.toObject(message.geoPointValue, options); + if (options.oneofs) + object.valueType = "geoPointValue"; + } + if (message.arrayValue != null && message.hasOwnProperty("arrayValue")) { + object.arrayValue = $root.google.events.cloud.firestore.v1.ArrayValue.toObject(message.arrayValue, options); + if (options.oneofs) + object.valueType = "arrayValue"; + } + if (message.timestampValue != null && message.hasOwnProperty("timestampValue")) { + object.timestampValue = $root.google.protobuf.Timestamp.toObject(message.timestampValue, options); + if (options.oneofs) + object.valueType = "timestampValue"; + } + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + object.nullValue = options.enums === String ? $root.google.protobuf.NullValue[message.nullValue] === undefined ? message.nullValue : $root.google.protobuf.NullValue[message.nullValue] : message.nullValue; + if (options.oneofs) + object.valueType = "nullValue"; + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + object.stringValue = message.stringValue; + if (options.oneofs) + object.valueType = "stringValue"; + } + if (message.bytesValue != null && message.hasOwnProperty("bytesValue")) { + object.bytesValue = options.bytes === String ? $util.base64.encode(message.bytesValue, 0, message.bytesValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.bytesValue) : message.bytesValue; + if (options.oneofs) + object.valueType = "bytesValue"; + } + return object; + }; + + /** + * Converts this Value to JSON. 
+ * @function toJSON + * @memberof google.events.cloud.firestore.v1.Value + * @instance + * @returns {Object.} JSON object + */ + Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Value + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.Value"; + }; + + return Value; + })(); + + v1.ArrayValue = (function() { + + /** + * Properties of an ArrayValue. + * @memberof google.events.cloud.firestore.v1 + * @interface IArrayValue + * @property {Array.|null} [values] ArrayValue values + */ + + /** + * Constructs a new ArrayValue. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents an ArrayValue. + * @implements IArrayValue + * @constructor + * @param {google.events.cloud.firestore.v1.IArrayValue=} [properties] Properties to set + */ + function ArrayValue(properties) { + this.values = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrayValue values. + * @member {Array.} values + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @instance + */ + ArrayValue.prototype.values = $util.emptyArray; + + /** + * Creates a new ArrayValue instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.IArrayValue=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue instance + */ + ArrayValue.create = function create(properties) { + return new ArrayValue(properties); + }; + + /** + * Encodes the specified ArrayValue message. Does not implicitly {@link google.events.cloud.firestore.v1.ArrayValue.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.IArrayValue} message ArrayValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrayValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (var i = 0; i < message.values.length; ++i) + $root.google.events.cloud.firestore.v1.Value.encode(message.values[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ArrayValue message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.ArrayValue.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.IArrayValue} message ArrayValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrayValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrayValue message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrayValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.ArrayValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push($root.google.events.cloud.firestore.v1.Value.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrayValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrayValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrayValue message. + * @function verify + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrayValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (var i = 0; i < message.values.length; ++i) { + var error = $root.google.events.cloud.firestore.v1.Value.verify(message.values[i]); + if (error) + return "values." + error; + } + } + return null; + }; + + /** + * Creates an ArrayValue message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue + */ + ArrayValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.ArrayValue) + return object; + var message = new $root.google.events.cloud.firestore.v1.ArrayValue(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.events.cloud.firestore.v1.ArrayValue.values: array expected"); + message.values = []; + for (var i = 0; i < object.values.length; ++i) { + if (typeof object.values[i] !== "object") + throw TypeError(".google.events.cloud.firestore.v1.ArrayValue.values: object expected"); + message.values[i] = $root.google.events.cloud.firestore.v1.Value.fromObject(object.values[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an ArrayValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.ArrayValue} message ArrayValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrayValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.values = []; + if (message.values && message.values.length) { + object.values = []; + for (var j = 0; j < message.values.length; ++j) + object.values[j] = $root.google.events.cloud.firestore.v1.Value.toObject(message.values[j], options); + } + return object; + }; + + /** + * Converts this ArrayValue to JSON. + * @function toJSON + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @instance + * @returns {Object.} JSON object + */ + ArrayValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrayValue + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrayValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.ArrayValue"; + }; + + return ArrayValue; + })(); + + v1.MapValue = (function() { + + /** + * Properties of a MapValue. + * @memberof google.events.cloud.firestore.v1 + * @interface IMapValue + * @property {Object.|null} [fields] MapValue fields + */ + + /** + * Constructs a new MapValue. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a MapValue. + * @implements IMapValue + * @constructor + * @param {google.events.cloud.firestore.v1.IMapValue=} [properties] Properties to set + */ + function MapValue(properties) { + this.fields = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MapValue fields. + * @member {Object.} fields + * @memberof google.events.cloud.firestore.v1.MapValue + * @instance + */ + MapValue.prototype.fields = $util.emptyObject; + + /** + * Creates a new MapValue instance using the specified properties. 
+ * @function create + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.IMapValue=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue instance + */ + MapValue.create = function create(properties) { + return new MapValue(properties); + }; + + /** + * Encodes the specified MapValue message. Does not implicitly {@link google.events.cloud.firestore.v1.MapValue.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.IMapValue} message MapValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MapValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) + for (var keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { + writer.uint32(/* id 1, wireType 2 =*/10).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); + $root.google.events.cloud.firestore.v1.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); + } + return writer; + }; + + /** + * Encodes the specified MapValue message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.MapValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.IMapValue} message MapValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MapValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MapValue message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MapValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.MapValue(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (message.fields === $util.emptyObject) + message.fields = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = null; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = $root.google.events.cloud.firestore.v1.Value.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.fields[key] = value; + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MapValue message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MapValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MapValue message. + * @function verify + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MapValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!$util.isObject(message.fields)) + return "fields: object expected"; + var key = Object.keys(message.fields); + for (var i = 0; i < key.length; ++i) { + var error = $root.google.events.cloud.firestore.v1.Value.verify(message.fields[key[i]]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a MapValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue + */ + MapValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.MapValue) + return object; + var message = new $root.google.events.cloud.firestore.v1.MapValue(); + if (object.fields) { + if (typeof object.fields !== "object") + throw TypeError(".google.events.cloud.firestore.v1.MapValue.fields: object expected"); + message.fields = {}; + for (var keys = Object.keys(object.fields), i = 0; i < keys.length; ++i) { + if (typeof object.fields[keys[i]] !== "object") + throw TypeError(".google.events.cloud.firestore.v1.MapValue.fields: object expected"); + message.fields[keys[i]] = $root.google.events.cloud.firestore.v1.Value.fromObject(object.fields[keys[i]]); + } + } + return message; + }; + + /** + * Creates a plain object from a MapValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.MapValue} message MapValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MapValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.objects || options.defaults) + object.fields = {}; + var keys2; + if (message.fields && (keys2 = Object.keys(message.fields)).length) { + object.fields = {}; + for (var j = 0; j < keys2.length; ++j) + object.fields[keys2[j]] = $root.google.events.cloud.firestore.v1.Value.toObject(message.fields[keys2[j]], options); + } + return object; + }; + + /** + * Converts this MapValue to JSON. 
+ * @function toJSON + * @memberof google.events.cloud.firestore.v1.MapValue + * @instance + * @returns {Object.} JSON object + */ + MapValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MapValue + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MapValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.MapValue"; + }; + + return MapValue; + })(); + + return v1; + })(); + + return firestore; + })(); + + return cloud; + })(); + + return events; + })(); + + google.type = (function() { + + /** + * Namespace type. + * @memberof google + * @namespace + */ + var type = {}; + + type.LatLng = (function() { + + /** + * Properties of a LatLng. + * @memberof google.type + * @interface ILatLng + * @property {number|null} [latitude] LatLng latitude + * @property {number|null} [longitude] LatLng longitude + */ + + /** + * Constructs a new LatLng. + * @memberof google.type + * @classdesc Represents a LatLng. + * @implements ILatLng + * @constructor + * @param {google.type.ILatLng=} [properties] Properties to set + */ + function LatLng(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * LatLng latitude. + * @member {number} latitude + * @memberof google.type.LatLng + * @instance + */ + LatLng.prototype.latitude = 0; + + /** + * LatLng longitude. + * @member {number} longitude + * @memberof google.type.LatLng + * @instance + */ + LatLng.prototype.longitude = 0; + + /** + * Creates a new LatLng instance using the specified properties. + * @function create + * @memberof google.type.LatLng + * @static + * @param {google.type.ILatLng=} [properties] Properties to set + * @returns {google.type.LatLng} LatLng instance + */ + LatLng.create = function create(properties) { + return new LatLng(properties); + }; + + /** + * Encodes the specified LatLng message. Does not implicitly {@link google.type.LatLng.verify|verify} messages. + * @function encode + * @memberof google.type.LatLng + * @static + * @param {google.type.ILatLng} message LatLng message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + LatLng.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.latitude != null && Object.hasOwnProperty.call(message, "latitude")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.latitude); + if (message.longitude != null && Object.hasOwnProperty.call(message, "longitude")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.longitude); + return writer; + }; + + /** + * Encodes the specified LatLng message, length delimited. Does not implicitly {@link google.type.LatLng.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.type.LatLng + * @static + * @param {google.type.ILatLng} message LatLng message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + LatLng.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a LatLng message from the specified reader or buffer. + * @function decode + * @memberof google.type.LatLng + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.type.LatLng} LatLng + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + LatLng.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.type.LatLng(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.latitude = reader.double(); + break; + } + case 2: { + message.longitude = reader.double(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a LatLng message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.type.LatLng + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.type.LatLng} LatLng + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + LatLng.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a LatLng message. + * @function verify + * @memberof google.type.LatLng + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + LatLng.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.latitude != null && message.hasOwnProperty("latitude")) + if (typeof message.latitude !== "number") + return "latitude: number expected"; + if (message.longitude != null && message.hasOwnProperty("longitude")) + if (typeof message.longitude !== "number") + return "longitude: number expected"; + return null; + }; + + /** + * Creates a LatLng message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.type.LatLng + * @static + * @param {Object.} object Plain object + * @returns {google.type.LatLng} LatLng + */ + LatLng.fromObject = function fromObject(object) { + if (object instanceof $root.google.type.LatLng) + return object; + var message = new $root.google.type.LatLng(); + if (object.latitude != null) + message.latitude = Number(object.latitude); + if (object.longitude != null) + message.longitude = Number(object.longitude); + return message; + }; + + /** + * Creates a plain object from a LatLng message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.type.LatLng + * @static + * @param {google.type.LatLng} message LatLng + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + LatLng.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.latitude = 0; + object.longitude = 0; + } + if (message.latitude != null && message.hasOwnProperty("latitude")) + object.latitude = options.json && !isFinite(message.latitude) ? String(message.latitude) : message.latitude; + if (message.longitude != null && message.hasOwnProperty("longitude")) + object.longitude = options.json && !isFinite(message.longitude) ? String(message.longitude) : message.longitude; + return object; + }; + + /** + * Converts this LatLng to JSON. + * @function toJSON + * @memberof google.type.LatLng + * @instance + * @returns {Object.} JSON object + */ + LatLng.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for LatLng + * @function getTypeUrl + * @memberof google.type.LatLng + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + LatLng.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.type.LatLng"; + }; + + return LatLng; + })(); + + return type; + })(); + + return google; +})(); + +module.exports = $root; diff --git a/protos/compiledFirestore.mjs b/protos/compiledFirestore.mjs new file mode 100644 index 000000000..c6ed9ec7d --- /dev/null +++ b/protos/compiledFirestore.mjs @@ -0,0 +1,3512 @@ +/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/ +import $protobuf from "protobufjs/minimal.js"; + +// Common aliases +const $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + +// Exported root namespace +const $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + +export const google = $root.google = (() => { + + /** + * Namespace google. + * @exports google + * @namespace + */ + const google = {}; + + google.protobuf = (function() { + + /** + * Namespace protobuf. + * @memberof google + * @namespace + */ + const protobuf = {}; + + protobuf.Struct = (function() { + + /** + * Properties of a Struct. + * @memberof google.protobuf + * @interface IStruct + * @property {Object.|null} [fields] Struct fields + */ + + /** + * Constructs a new Struct. + * @memberof google.protobuf + * @classdesc Represents a Struct. + * @implements IStruct + * @constructor + * @param {google.protobuf.IStruct=} [properties] Properties to set + */ + function Struct(properties) { + this.fields = {}; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Struct fields. + * @member {Object.} fields + * @memberof google.protobuf.Struct + * @instance + */ + Struct.prototype.fields = $util.emptyObject; + + /** + * Creates a new Struct instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.IStruct=} [properties] Properties to set + * @returns {google.protobuf.Struct} Struct instance + */ + Struct.create = function create(properties) { + return new Struct(properties); + }; + + /** + * Encodes the specified Struct message. Does not implicitly {@link google.protobuf.Struct.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.IStruct} message Struct message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Struct.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) + for (let keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { + writer.uint32(/* id 1, wireType 2 =*/10).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); + $root.google.protobuf.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); + } + return writer; + }; + + /** + * Encodes the specified Struct message, length delimited. Does not implicitly {@link google.protobuf.Struct.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.IStruct} message Struct message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Struct.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Struct message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Struct + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Struct} Struct + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Struct.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Struct(), key, value; + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (message.fields === $util.emptyObject) + message.fields = {}; + let end2 = reader.uint32() + reader.pos; + key = ""; + value = null; + while (reader.pos < end2) { + let tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = $root.google.protobuf.Value.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.fields[key] = value; + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Struct message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.Struct + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Struct} Struct + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Struct.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Struct message. + * @function verify + * @memberof google.protobuf.Struct + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Struct.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!$util.isObject(message.fields)) + return "fields: object expected"; + let key = Object.keys(message.fields); + for (let i = 0; i < key.length; ++i) { + let error = $root.google.protobuf.Value.verify(message.fields[key[i]]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a Struct message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Struct + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Struct} Struct + */ + Struct.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Struct) + return object; + let message = new $root.google.protobuf.Struct(); + if (object.fields) { + if (typeof object.fields !== "object") + throw TypeError(".google.protobuf.Struct.fields: object expected"); + message.fields = {}; + for (let keys = Object.keys(object.fields), i = 0; i < keys.length; ++i) { + if (typeof object.fields[keys[i]] !== "object") + throw TypeError(".google.protobuf.Struct.fields: object expected"); + message.fields[keys[i]] = $root.google.protobuf.Value.fromObject(object.fields[keys[i]]); + } + } + return message; + }; + + /** + * Creates a plain object from a Struct message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Struct + * @static + * @param {google.protobuf.Struct} message Struct + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Struct.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.objects || options.defaults) + object.fields = {}; + let keys2; + if (message.fields && (keys2 = Object.keys(message.fields)).length) { + object.fields = {}; + for (let j = 0; j < keys2.length; ++j) + object.fields[keys2[j]] = $root.google.protobuf.Value.toObject(message.fields[keys2[j]], options); + } + return object; + }; + + /** + * Converts this Struct to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Struct + * @instance + * @returns {Object.} JSON object + */ + Struct.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Struct + * @function getTypeUrl + * @memberof google.protobuf.Struct + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Struct.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Struct"; + }; + + return Struct; + })(); + + protobuf.Value = (function() { + + /** + * Properties of a Value. + * @memberof google.protobuf + * @interface IValue + * @property {google.protobuf.NullValue|null} [nullValue] Value nullValue + * @property {number|null} [numberValue] Value numberValue + * @property {string|null} [stringValue] Value stringValue + * @property {boolean|null} [boolValue] Value boolValue + * @property {google.protobuf.IStruct|null} [structValue] Value structValue + * @property {google.protobuf.IListValue|null} [listValue] Value listValue + */ + + /** + * Constructs a new Value. + * @memberof google.protobuf + * @classdesc Represents a Value. + * @implements IValue + * @constructor + * @param {google.protobuf.IValue=} [properties] Properties to set + */ + function Value(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Value nullValue. + * @member {google.protobuf.NullValue|null|undefined} nullValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.nullValue = null; + + /** + * Value numberValue. + * @member {number|null|undefined} numberValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.numberValue = null; + + /** + * Value stringValue. + * @member {string|null|undefined} stringValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.stringValue = null; + + /** + * Value boolValue. + * @member {boolean|null|undefined} boolValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.boolValue = null; + + /** + * Value structValue. + * @member {google.protobuf.IStruct|null|undefined} structValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.structValue = null; + + /** + * Value listValue. + * @member {google.protobuf.IListValue|null|undefined} listValue + * @memberof google.protobuf.Value + * @instance + */ + Value.prototype.listValue = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * Value kind. + * @member {"nullValue"|"numberValue"|"stringValue"|"boolValue"|"structValue"|"listValue"|undefined} kind + * @memberof google.protobuf.Value + * @instance + */ + Object.defineProperty(Value.prototype, "kind", { + get: $util.oneOfGetter($oneOfFields = ["nullValue", "numberValue", "stringValue", "boolValue", "structValue", "listValue"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new Value instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.IValue=} [properties] Properties to set + * @returns {google.protobuf.Value} Value instance + */ + Value.create = function create(properties) { + return new Value(properties); + }; + + /** + * Encodes the specified Value message. Does not implicitly {@link google.protobuf.Value.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.nullValue != null && Object.hasOwnProperty.call(message, "nullValue")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.nullValue); + if (message.numberValue != null && Object.hasOwnProperty.call(message, "numberValue")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.numberValue); + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.stringValue); + if (message.boolValue != null && Object.hasOwnProperty.call(message, "boolValue")) + writer.uint32(/* id 4, wireType 0 =*/32).bool(message.boolValue); + if (message.structValue != null && Object.hasOwnProperty.call(message, "structValue")) + $root.google.protobuf.Struct.encode(message.structValue, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.listValue != null && Object.hasOwnProperty.call(message, "listValue")) + $root.google.protobuf.ListValue.encode(message.listValue, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Value message, length delimited. Does not implicitly {@link google.protobuf.Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Value message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Value(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.nullValue = reader.int32(); + break; + } + case 2: { + message.numberValue = reader.double(); + break; + } + case 3: { + message.stringValue = reader.string(); + break; + } + case 4: { + message.boolValue = reader.bool(); + break; + } + case 5: { + message.structValue = $root.google.protobuf.Struct.decode(reader, reader.uint32()); + break; + } + case 6: { + message.listValue = $root.google.protobuf.ListValue.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Value message. + * @function verify + * @memberof google.protobuf.Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + properties.kind = 1; + switch (message.nullValue) { + default: + return "nullValue: enum value expected"; + case 0: + break; + } + } + if (message.numberValue != null && message.hasOwnProperty("numberValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + if (typeof message.numberValue !== "number") + return "numberValue: number expected"; + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + if (!$util.isString(message.stringValue)) + return "stringValue: string expected"; + } + if (message.boolValue != null && message.hasOwnProperty("boolValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + if (typeof message.boolValue !== "boolean") + return "boolValue: boolean expected"; + } + if (message.structValue != null && message.hasOwnProperty("structValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + { + let error = $root.google.protobuf.Struct.verify(message.structValue); + if (error) + return "structValue." + error; + } + } + if (message.listValue != null && message.hasOwnProperty("listValue")) { + if (properties.kind === 1) + return "kind: multiple values"; + properties.kind = 1; + { + let error = $root.google.protobuf.ListValue.verify(message.listValue); + if (error) + return "listValue." + error; + } + } + return null; + }; + + /** + * Creates a Value message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Value} Value + */ + Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Value) + return object; + let message = new $root.google.protobuf.Value(); + switch (object.nullValue) { + default: + if (typeof object.nullValue === "number") { + message.nullValue = object.nullValue; + break; + } + break; + case "NULL_VALUE": + case 0: + message.nullValue = 0; + break; + } + if (object.numberValue != null) + message.numberValue = Number(object.numberValue); + if (object.stringValue != null) + message.stringValue = String(object.stringValue); + if (object.boolValue != null) + message.boolValue = Boolean(object.boolValue); + if (object.structValue != null) { + if (typeof object.structValue !== "object") + throw TypeError(".google.protobuf.Value.structValue: object expected"); + message.structValue = $root.google.protobuf.Struct.fromObject(object.structValue); + } + if (object.listValue != null) { + if (typeof object.listValue !== "object") + throw TypeError(".google.protobuf.Value.listValue: object expected"); + message.listValue = $root.google.protobuf.ListValue.fromObject(object.listValue); + } + return message; + }; + + /** + * Creates a plain object from a Value message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Value + * @static + * @param {google.protobuf.Value} message Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + object.nullValue = options.enums === String ? $root.google.protobuf.NullValue[message.nullValue] === undefined ? message.nullValue : $root.google.protobuf.NullValue[message.nullValue] : message.nullValue; + if (options.oneofs) + object.kind = "nullValue"; + } + if (message.numberValue != null && message.hasOwnProperty("numberValue")) { + object.numberValue = options.json && !isFinite(message.numberValue) ? String(message.numberValue) : message.numberValue; + if (options.oneofs) + object.kind = "numberValue"; + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + object.stringValue = message.stringValue; + if (options.oneofs) + object.kind = "stringValue"; + } + if (message.boolValue != null && message.hasOwnProperty("boolValue")) { + object.boolValue = message.boolValue; + if (options.oneofs) + object.kind = "boolValue"; + } + if (message.structValue != null && message.hasOwnProperty("structValue")) { + object.structValue = $root.google.protobuf.Struct.toObject(message.structValue, options); + if (options.oneofs) + object.kind = "structValue"; + } + if (message.listValue != null && message.hasOwnProperty("listValue")) { + object.listValue = $root.google.protobuf.ListValue.toObject(message.listValue, options); + if (options.oneofs) + object.kind = "listValue"; + } + return object; + }; + + /** + * Converts this Value to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Value + * @instance + * @returns {Object.} JSON object + */ + Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Value + * @function getTypeUrl + * @memberof google.protobuf.Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Value"; + }; + + return Value; + })(); + + /** + * NullValue enum. + * @name google.protobuf.NullValue + * @enum {number} + * @property {number} NULL_VALUE=0 NULL_VALUE value + */ + protobuf.NullValue = (function() { + const valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "NULL_VALUE"] = 0; + return values; + })(); + + protobuf.ListValue = (function() { + + /** + * Properties of a ListValue. + * @memberof google.protobuf + * @interface IListValue + * @property {Array.|null} [values] ListValue values + */ + + /** + * Constructs a new ListValue. + * @memberof google.protobuf + * @classdesc Represents a ListValue. + * @implements IListValue + * @constructor + * @param {google.protobuf.IListValue=} [properties] Properties to set + */ + function ListValue(properties) { + this.values = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ListValue values. + * @member {Array.} values + * @memberof google.protobuf.ListValue + * @instance + */ + ListValue.prototype.values = $util.emptyArray; + + /** + * Creates a new ListValue instance using the specified properties. + * @function create + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.IListValue=} [properties] Properties to set + * @returns {google.protobuf.ListValue} ListValue instance + */ + ListValue.create = function create(properties) { + return new ListValue(properties); + }; + + /** + * Encodes the specified ListValue message. Does not implicitly {@link google.protobuf.ListValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.IListValue} message ListValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (let i = 0; i < message.values.length; ++i) + $root.google.protobuf.Value.encode(message.values[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ListValue message, length delimited. Does not implicitly {@link google.protobuf.ListValue.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.IListValue} message ListValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ListValue message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.ListValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ListValue} ListValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ListValue(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push($root.google.protobuf.Value.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ListValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.ListValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ListValue} ListValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ListValue message. + * @function verify + * @memberof google.protobuf.ListValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ListValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (let i = 0; i < message.values.length; ++i) { + let error = $root.google.protobuf.Value.verify(message.values[i]); + if (error) + return "values." + error; + } + } + return null; + }; + + /** + * Creates a ListValue message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.ListValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ListValue} ListValue + */ + ListValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ListValue) + return object; + let message = new $root.google.protobuf.ListValue(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.protobuf.ListValue.values: array expected"); + message.values = []; + for (let i = 0; i < object.values.length; ++i) { + if (typeof object.values[i] !== "object") + throw TypeError(".google.protobuf.ListValue.values: object expected"); + message.values[i] = $root.google.protobuf.Value.fromObject(object.values[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a ListValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.ListValue + * @static + * @param {google.protobuf.ListValue} message ListValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ListValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) + object.values = []; + if (message.values && message.values.length) { + object.values = []; + for (let j = 0; j < message.values.length; ++j) + object.values[j] = $root.google.protobuf.Value.toObject(message.values[j], options); + } + return object; + }; + + /** + * Converts this ListValue to JSON. + * @function toJSON + * @memberof google.protobuf.ListValue + * @instance + * @returns {Object.} JSON object + */ + ListValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ListValue + * @function getTypeUrl + * @memberof google.protobuf.ListValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ListValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ListValue"; + }; + + return ListValue; + })(); + + protobuf.Timestamp = (function() { + + /** + * Properties of a Timestamp. + * @memberof google.protobuf + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos + */ + + /** + * Constructs a new Timestamp. + * @memberof google.protobuf + * @classdesc Represents a Timestamp. + * @implements ITimestamp + * @constructor + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + */ + function Timestamp(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Timestamp nanos. + * @member {number} nanos + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.nanos = 0; + + /** + * Creates a new Timestamp instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance + */ + Timestamp.create = function create(properties) { + return new Timestamp(properties); + }; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + return writer; + }; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.seconds = reader.int64(); + break; + } + case 2: { + message.nanos = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Timestamp message. 
+ * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp + */ + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + let message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.defaults) { + if ($util.Long) { + let long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? "0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; + + /** + * Converts this Timestamp to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Timestamp + * @instance + * @returns {Object.} JSON object + */ + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Timestamp + * @function getTypeUrl + * @memberof google.protobuf.Timestamp + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Timestamp"; + }; + + return Timestamp; + })(); + + protobuf.Any = (function() { + + /** + * Properties of an Any. + * @memberof google.protobuf + * @interface IAny + * @property {string|null} [typeUrl] Any typeUrl + * @property {Uint8Array|null} [value] Any value + */ + + /** + * Constructs a new Any. + * @memberof google.protobuf + * @classdesc Represents an Any. + * @implements IAny + * @constructor + * @param {google.protobuf.IAny=} [properties] Properties to set + */ + function Any(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Any typeUrl. + * @member {string} typeUrl + * @memberof google.protobuf.Any + * @instance + */ + Any.prototype.typeUrl = ""; + + /** + * Any value. + * @member {Uint8Array} value + * @memberof google.protobuf.Any + * @instance + */ + Any.prototype.value = $util.newBuffer([]); + + /** + * Creates a new Any instance using the specified properties. + * @function create + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny=} [properties] Properties to set + * @returns {google.protobuf.Any} Any instance + */ + Any.create = function create(properties) { + return new Any(properties); + }; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny} message Any message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Any.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.typeUrl != null && Object.hasOwnProperty.call(message, "typeUrl")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.typeUrl); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.value); + return writer; + }; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny} message Any message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Any.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Any message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.Any + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Any} Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Any.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Any(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.typeUrl = reader.string(); + break; + } + case 2: { + message.value = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Any + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Any} Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Any.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Any message. + * @function verify + * @memberof google.protobuf.Any + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Any.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.typeUrl != null && message.hasOwnProperty("typeUrl")) + if (!$util.isString(message.typeUrl)) + return "typeUrl: string expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) + return "value: buffer expected"; + return null; + }; + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Any + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Any} Any + */ + Any.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Any) + return object; + let message = new $root.google.protobuf.Any(); + if (object.typeUrl != null) + message.typeUrl = String(object.typeUrl); + if (object.value != null) + if (typeof object.value === "string") + $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); + else if (object.value.length >= 0) + message.value = object.value; + return message; + }; + + /** + * Creates a plain object from an Any message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.Any} message Any + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Any.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.defaults) { + object.typeUrl = ""; + if (options.bytes === String) + object.value = ""; + else { + object.value = []; + if (options.bytes !== Array) + object.value = $util.newBuffer(object.value); + } + } + if (message.typeUrl != null && message.hasOwnProperty("typeUrl")) + object.typeUrl = message.typeUrl; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; + return object; + }; + + /** + * Converts this Any to JSON. + * @function toJSON + * @memberof google.protobuf.Any + * @instance + * @returns {Object.} JSON object + */ + Any.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Any + * @function getTypeUrl + * @memberof google.protobuf.Any + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Any.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Any"; + }; + + return Any; + })(); + + return protobuf; + })(); + + google.events = (function() { + + /** + * Namespace events. + * @memberof google + * @namespace + */ + const events = {}; + + events.cloud = (function() { + + /** + * Namespace cloud. + * @memberof google.events + * @namespace + */ + const cloud = {}; + + cloud.firestore = (function() { + + /** + * Namespace firestore. + * @memberof google.events.cloud + * @namespace + */ + const firestore = {}; + + firestore.v1 = (function() { + + /** + * Namespace v1. + * @memberof google.events.cloud.firestore + * @namespace + */ + const v1 = {}; + + v1.DocumentEventData = (function() { + + /** + * Properties of a DocumentEventData. + * @memberof google.events.cloud.firestore.v1 + * @interface IDocumentEventData + * @property {google.events.cloud.firestore.v1.IDocument|null} [value] DocumentEventData value + * @property {google.events.cloud.firestore.v1.IDocument|null} [oldValue] DocumentEventData oldValue + * @property {google.events.cloud.firestore.v1.IDocumentMask|null} [updateMask] DocumentEventData updateMask + */ + + /** + * Constructs a new DocumentEventData. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a DocumentEventData. + * @implements IDocumentEventData + * @constructor + * @param {google.events.cloud.firestore.v1.IDocumentEventData=} [properties] Properties to set + */ + function DocumentEventData(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DocumentEventData value. + * @member {google.events.cloud.firestore.v1.IDocument|null|undefined} value + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + */ + DocumentEventData.prototype.value = null; + + /** + * DocumentEventData oldValue. 
+ * @member {google.events.cloud.firestore.v1.IDocument|null|undefined} oldValue + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + */ + DocumentEventData.prototype.oldValue = null; + + /** + * DocumentEventData updateMask. + * @member {google.events.cloud.firestore.v1.IDocumentMask|null|undefined} updateMask + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + */ + DocumentEventData.prototype.updateMask = null; + + /** + * Creates a new DocumentEventData instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.IDocumentEventData=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData instance + */ + DocumentEventData.create = function create(properties) { + return new DocumentEventData(properties); + }; + + /** + * Encodes the specified DocumentEventData message. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentEventData.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.IDocumentEventData} message DocumentEventData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentEventData.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + $root.google.events.cloud.firestore.v1.Document.encode(message.value, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.oldValue != null && Object.hasOwnProperty.call(message, "oldValue")) + $root.google.events.cloud.firestore.v1.Document.encode(message.oldValue, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.updateMask != null && Object.hasOwnProperty.call(message, "updateMask")) + $root.google.events.cloud.firestore.v1.DocumentMask.encode(message.updateMask, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified DocumentEventData message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentEventData.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.IDocumentEventData} message DocumentEventData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentEventData.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DocumentEventData message from the specified reader or buffer. 
+ * @function decode + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentEventData.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.DocumentEventData(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = $root.google.events.cloud.firestore.v1.Document.decode(reader, reader.uint32()); + break; + } + case 2: { + message.oldValue = $root.google.events.cloud.firestore.v1.Document.decode(reader, reader.uint32()); + break; + } + case 3: { + message.updateMask = $root.google.events.cloud.firestore.v1.DocumentMask.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DocumentEventData message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentEventData.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DocumentEventData message. + * @function verify + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DocumentEventData.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) { + let error = $root.google.events.cloud.firestore.v1.Document.verify(message.value); + if (error) + return "value." + error; + } + if (message.oldValue != null && message.hasOwnProperty("oldValue")) { + let error = $root.google.events.cloud.firestore.v1.Document.verify(message.oldValue); + if (error) + return "oldValue." + error; + } + if (message.updateMask != null && message.hasOwnProperty("updateMask")) { + let error = $root.google.events.cloud.firestore.v1.DocumentMask.verify(message.updateMask); + if (error) + return "updateMask." + error; + } + return null; + }; + + /** + * Creates a DocumentEventData message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.DocumentEventData} DocumentEventData + */ + DocumentEventData.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.DocumentEventData) + return object; + let message = new $root.google.events.cloud.firestore.v1.DocumentEventData(); + if (object.value != null) { + if (typeof object.value !== "object") + throw TypeError(".google.events.cloud.firestore.v1.DocumentEventData.value: object expected"); + message.value = $root.google.events.cloud.firestore.v1.Document.fromObject(object.value); + } + if (object.oldValue != null) { + if (typeof object.oldValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.DocumentEventData.oldValue: object expected"); + message.oldValue = $root.google.events.cloud.firestore.v1.Document.fromObject(object.oldValue); + } + if (object.updateMask != null) { + if (typeof object.updateMask !== "object") + throw TypeError(".google.events.cloud.firestore.v1.DocumentEventData.updateMask: object expected"); + message.updateMask = $root.google.events.cloud.firestore.v1.DocumentMask.fromObject(object.updateMask); + } + return message; + }; + + /** + * Creates a plain object from a DocumentEventData message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {google.events.cloud.firestore.v1.DocumentEventData} message DocumentEventData + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DocumentEventData.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.defaults) { + object.value = null; + object.oldValue = null; + object.updateMask = null; + } + if (message.value != null && message.hasOwnProperty("value")) + object.value = $root.google.events.cloud.firestore.v1.Document.toObject(message.value, options); + if (message.oldValue != null && message.hasOwnProperty("oldValue")) + object.oldValue = $root.google.events.cloud.firestore.v1.Document.toObject(message.oldValue, options); + if (message.updateMask != null && message.hasOwnProperty("updateMask")) + object.updateMask = $root.google.events.cloud.firestore.v1.DocumentMask.toObject(message.updateMask, options); + return object; + }; + + /** + * Converts this DocumentEventData to JSON. + * @function toJSON + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @instance + * @returns {Object.} JSON object + */ + DocumentEventData.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DocumentEventData + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.DocumentEventData + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DocumentEventData.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.DocumentEventData"; + }; + + return DocumentEventData; + })(); + + v1.DocumentMask = (function() { + + /** + * Properties of a DocumentMask. 
+ * @memberof google.events.cloud.firestore.v1 + * @interface IDocumentMask + * @property {Array.|null} [fieldPaths] DocumentMask fieldPaths + */ + + /** + * Constructs a new DocumentMask. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a DocumentMask. + * @implements IDocumentMask + * @constructor + * @param {google.events.cloud.firestore.v1.IDocumentMask=} [properties] Properties to set + */ + function DocumentMask(properties) { + this.fieldPaths = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DocumentMask fieldPaths. + * @member {Array.} fieldPaths + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @instance + */ + DocumentMask.prototype.fieldPaths = $util.emptyArray; + + /** + * Creates a new DocumentMask instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.IDocumentMask=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask instance + */ + DocumentMask.create = function create(properties) { + return new DocumentMask(properties); + }; + + /** + * Encodes the specified DocumentMask message. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentMask.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.IDocumentMask} message DocumentMask message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentMask.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fieldPaths != null && message.fieldPaths.length) + for (let i = 0; i < message.fieldPaths.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.fieldPaths[i]); + return writer; + }; + + /** + * Encodes the specified DocumentMask message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.DocumentMask.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.IDocumentMask} message DocumentMask message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DocumentMask.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DocumentMask message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentMask.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.DocumentMask(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.fieldPaths && message.fieldPaths.length)) + message.fieldPaths = []; + message.fieldPaths.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DocumentMask message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DocumentMask.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DocumentMask message. + * @function verify + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DocumentMask.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fieldPaths != null && message.hasOwnProperty("fieldPaths")) { + if (!Array.isArray(message.fieldPaths)) + return "fieldPaths: array expected"; + for (let i = 0; i < message.fieldPaths.length; ++i) + if (!$util.isString(message.fieldPaths[i])) + return "fieldPaths: string[] expected"; + } + return null; + }; + + /** + * Creates a DocumentMask message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.DocumentMask} DocumentMask + */ + DocumentMask.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.DocumentMask) + return object; + let message = new $root.google.events.cloud.firestore.v1.DocumentMask(); + if (object.fieldPaths) { + if (!Array.isArray(object.fieldPaths)) + throw TypeError(".google.events.cloud.firestore.v1.DocumentMask.fieldPaths: array expected"); + message.fieldPaths = []; + for (let i = 0; i < object.fieldPaths.length; ++i) + message.fieldPaths[i] = String(object.fieldPaths[i]); + } + return message; + }; + + /** + * Creates a plain object from a DocumentMask message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {google.events.cloud.firestore.v1.DocumentMask} message DocumentMask + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DocumentMask.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) + object.fieldPaths = []; + if (message.fieldPaths && message.fieldPaths.length) { + object.fieldPaths = []; + for (let j = 0; j < message.fieldPaths.length; ++j) + object.fieldPaths[j] = message.fieldPaths[j]; + } + return object; + }; + + /** + * Converts this DocumentMask to JSON. + * @function toJSON + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @instance + * @returns {Object.} JSON object + */ + DocumentMask.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DocumentMask + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.DocumentMask + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DocumentMask.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.DocumentMask"; + }; + + return DocumentMask; + })(); + + v1.Document = (function() { + + /** + * Properties of a Document. + * @memberof google.events.cloud.firestore.v1 + * @interface IDocument + * @property {string|null} [name] Document name + * @property {Object.|null} [fields] Document fields + * @property {google.protobuf.ITimestamp|null} [createTime] Document createTime + * @property {google.protobuf.ITimestamp|null} [updateTime] Document updateTime + */ + + /** + * Constructs a new Document. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a Document. + * @implements IDocument + * @constructor + * @param {google.events.cloud.firestore.v1.IDocument=} [properties] Properties to set + */ + function Document(properties) { + this.fields = {}; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Document name. + * @member {string} name + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.name = ""; + + /** + * Document fields. + * @member {Object.} fields + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.fields = $util.emptyObject; + + /** + * Document createTime. + * @member {google.protobuf.ITimestamp|null|undefined} createTime + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.createTime = null; + + /** + * Document updateTime. + * @member {google.protobuf.ITimestamp|null|undefined} updateTime + * @memberof google.events.cloud.firestore.v1.Document + * @instance + */ + Document.prototype.updateTime = null; + + /** + * Creates a new Document instance using the specified properties. 
+ * @function create + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.IDocument=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.Document} Document instance + */ + Document.create = function create(properties) { + return new Document(properties); + }; + + /** + * Encodes the specified Document message. Does not implicitly {@link google.events.cloud.firestore.v1.Document.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.IDocument} message Document message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Document.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) + for (let keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); + $root.google.events.cloud.firestore.v1.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); + } + if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) + $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.updateTime != null && Object.hasOwnProperty.call(message, "updateTime")) + $root.google.protobuf.Timestamp.encode(message.updateTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Document message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.Document.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.IDocument} message Document message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Document.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Document message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.Document} Document + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Document.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.Document(), key, value; + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (message.fields === $util.emptyObject) + message.fields = {}; + let end2 = reader.uint32() + reader.pos; + key = ""; + value = null; + while (reader.pos < end2) { + let tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = $root.google.events.cloud.firestore.v1.Value.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.fields[key] = value; + break; + } + case 3: { + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 4: { + message.updateTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Document message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.Document} Document + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Document.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Document message. + * @function verify + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Document.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!$util.isObject(message.fields)) + return "fields: object expected"; + let key = Object.keys(message.fields); + for (let i = 0; i < key.length; ++i) { + let error = $root.google.events.cloud.firestore.v1.Value.verify(message.fields[key[i]]); + if (error) + return "fields." + error; + } + } + if (message.createTime != null && message.hasOwnProperty("createTime")) { + let error = $root.google.protobuf.Timestamp.verify(message.createTime); + if (error) + return "createTime." + error; + } + if (message.updateTime != null && message.hasOwnProperty("updateTime")) { + let error = $root.google.protobuf.Timestamp.verify(message.updateTime); + if (error) + return "updateTime." + error; + } + return null; + }; + + /** + * Creates a Document message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.Document} Document + */ + Document.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.Document) + return object; + let message = new $root.google.events.cloud.firestore.v1.Document(); + if (object.name != null) + message.name = String(object.name); + if (object.fields) { + if (typeof object.fields !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.fields: object expected"); + message.fields = {}; + for (let keys = Object.keys(object.fields), i = 0; i < keys.length; ++i) { + if (typeof object.fields[keys[i]] !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.fields: object expected"); + message.fields[keys[i]] = $root.google.events.cloud.firestore.v1.Value.fromObject(object.fields[keys[i]]); + } + } + if (object.createTime != null) { + if (typeof object.createTime !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.createTime: object expected"); + message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); + } + if (object.updateTime != null) { + if (typeof object.updateTime !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Document.updateTime: object expected"); + message.updateTime = $root.google.protobuf.Timestamp.fromObject(object.updateTime); + } + return message; + }; + + /** + * Creates a plain object from a Document message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {google.events.cloud.firestore.v1.Document} message Document + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Document.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.objects || options.defaults) + object.fields = {}; + if (options.defaults) { + object.name = ""; + object.createTime = null; + object.updateTime = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + let keys2; + if (message.fields && (keys2 = Object.keys(message.fields)).length) { + object.fields = {}; + for (let j = 0; j < keys2.length; ++j) + object.fields[keys2[j]] = $root.google.events.cloud.firestore.v1.Value.toObject(message.fields[keys2[j]], options); + } + if (message.createTime != null && message.hasOwnProperty("createTime")) + object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); + if (message.updateTime != null && message.hasOwnProperty("updateTime")) + object.updateTime = $root.google.protobuf.Timestamp.toObject(message.updateTime, options); + return object; + }; + + /** + * Converts this Document to JSON. 
+ * @function toJSON + * @memberof google.events.cloud.firestore.v1.Document + * @instance + * @returns {Object.} JSON object + */ + Document.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Document + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.Document + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Document.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.Document"; + }; + + return Document; + })(); + + v1.Value = (function() { + + /** + * Properties of a Value. + * @memberof google.events.cloud.firestore.v1 + * @interface IValue + * @property {google.protobuf.NullValue|null} [nullValue] Value nullValue + * @property {boolean|null} [booleanValue] Value booleanValue + * @property {number|Long|null} [integerValue] Value integerValue + * @property {number|null} [doubleValue] Value doubleValue + * @property {google.protobuf.ITimestamp|null} [timestampValue] Value timestampValue + * @property {string|null} [stringValue] Value stringValue + * @property {Uint8Array|null} [bytesValue] Value bytesValue + * @property {string|null} [referenceValue] Value referenceValue + * @property {google.type.ILatLng|null} [geoPointValue] Value geoPointValue + * @property {google.events.cloud.firestore.v1.IArrayValue|null} [arrayValue] Value arrayValue + * @property {google.events.cloud.firestore.v1.IMapValue|null} [mapValue] Value mapValue + */ + + /** + * Constructs a new Value. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a Value. + * @implements IValue + * @constructor + * @param {google.events.cloud.firestore.v1.IValue=} [properties] Properties to set + */ + function Value(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Value nullValue. + * @member {google.protobuf.NullValue|null|undefined} nullValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.nullValue = null; + + /** + * Value booleanValue. + * @member {boolean|null|undefined} booleanValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.booleanValue = null; + + /** + * Value integerValue. + * @member {number|Long|null|undefined} integerValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.integerValue = null; + + /** + * Value doubleValue. + * @member {number|null|undefined} doubleValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.doubleValue = null; + + /** + * Value timestampValue. + * @member {google.protobuf.ITimestamp|null|undefined} timestampValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.timestampValue = null; + + /** + * Value stringValue. + * @member {string|null|undefined} stringValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.stringValue = null; + + /** + * Value bytesValue. 
+ * @member {Uint8Array|null|undefined} bytesValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.bytesValue = null; + + /** + * Value referenceValue. + * @member {string|null|undefined} referenceValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.referenceValue = null; + + /** + * Value geoPointValue. + * @member {google.type.ILatLng|null|undefined} geoPointValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.geoPointValue = null; + + /** + * Value arrayValue. + * @member {google.events.cloud.firestore.v1.IArrayValue|null|undefined} arrayValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.arrayValue = null; + + /** + * Value mapValue. + * @member {google.events.cloud.firestore.v1.IMapValue|null|undefined} mapValue + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Value.prototype.mapValue = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * Value valueType. + * @member {"nullValue"|"booleanValue"|"integerValue"|"doubleValue"|"timestampValue"|"stringValue"|"bytesValue"|"referenceValue"|"geoPointValue"|"arrayValue"|"mapValue"|undefined} valueType + * @memberof google.events.cloud.firestore.v1.Value + * @instance + */ + Object.defineProperty(Value.prototype, "valueType", { + get: $util.oneOfGetter($oneOfFields = ["nullValue", "booleanValue", "integerValue", "doubleValue", "timestampValue", "stringValue", "bytesValue", "referenceValue", "geoPointValue", "arrayValue", "mapValue"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new Value instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.IValue=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.Value} Value instance + */ + Value.create = function create(properties) { + return new Value(properties); + }; + + /** + * Encodes the specified Value message. Does not implicitly {@link google.events.cloud.firestore.v1.Value.verify|verify} messages. 
+ * @function encode + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.booleanValue != null && Object.hasOwnProperty.call(message, "booleanValue")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.booleanValue); + if (message.integerValue != null && Object.hasOwnProperty.call(message, "integerValue")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.integerValue); + if (message.doubleValue != null && Object.hasOwnProperty.call(message, "doubleValue")) + writer.uint32(/* id 3, wireType 1 =*/25).double(message.doubleValue); + if (message.referenceValue != null && Object.hasOwnProperty.call(message, "referenceValue")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.referenceValue); + if (message.mapValue != null && Object.hasOwnProperty.call(message, "mapValue")) + $root.google.events.cloud.firestore.v1.MapValue.encode(message.mapValue, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.geoPointValue != null && Object.hasOwnProperty.call(message, "geoPointValue")) + $root.google.type.LatLng.encode(message.geoPointValue, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.arrayValue != null && Object.hasOwnProperty.call(message, "arrayValue")) + $root.google.events.cloud.firestore.v1.ArrayValue.encode(message.arrayValue, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.timestampValue != null && Object.hasOwnProperty.call(message, "timestampValue")) + $root.google.protobuf.Timestamp.encode(message.timestampValue, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + if (message.nullValue != null && Object.hasOwnProperty.call(message, "nullValue")) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.nullValue); + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) + writer.uint32(/* id 17, wireType 2 =*/138).string(message.stringValue); + if (message.bytesValue != null && Object.hasOwnProperty.call(message, "bytesValue")) + writer.uint32(/* id 18, wireType 2 =*/146).bytes(message.bytesValue); + return writer; + }; + + /** + * Encodes the specified Value message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.IValue} message Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Value message from the specified reader or buffer. 
+ * @function decode + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.Value(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 11: { + message.nullValue = reader.int32(); + break; + } + case 1: { + message.booleanValue = reader.bool(); + break; + } + case 2: { + message.integerValue = reader.int64(); + break; + } + case 3: { + message.doubleValue = reader.double(); + break; + } + case 10: { + message.timestampValue = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 17: { + message.stringValue = reader.string(); + break; + } + case 18: { + message.bytesValue = reader.bytes(); + break; + } + case 5: { + message.referenceValue = reader.string(); + break; + } + case 8: { + message.geoPointValue = $root.google.type.LatLng.decode(reader, reader.uint32()); + break; + } + case 9: { + message.arrayValue = $root.google.events.cloud.firestore.v1.ArrayValue.decode(reader, reader.uint32()); + break; + } + case 6: { + message.mapValue = $root.google.events.cloud.firestore.v1.MapValue.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.Value} Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Value message. 
+ * @function verify + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + properties.valueType = 1; + switch (message.nullValue) { + default: + return "nullValue: enum value expected"; + case 0: + break; + } + } + if (message.booleanValue != null && message.hasOwnProperty("booleanValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (typeof message.booleanValue !== "boolean") + return "booleanValue: boolean expected"; + } + if (message.integerValue != null && message.hasOwnProperty("integerValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!$util.isInteger(message.integerValue) && !(message.integerValue && $util.isInteger(message.integerValue.low) && $util.isInteger(message.integerValue.high))) + return "integerValue: integer|Long expected"; + } + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (typeof message.doubleValue !== "number") + return "doubleValue: number expected"; + } + if (message.timestampValue != null && message.hasOwnProperty("timestampValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + let error = $root.google.protobuf.Timestamp.verify(message.timestampValue); + if (error) + return "timestampValue." + error; + } + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!$util.isString(message.stringValue)) + return "stringValue: string expected"; + } + if (message.bytesValue != null && message.hasOwnProperty("bytesValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!(message.bytesValue && typeof message.bytesValue.length === "number" || $util.isString(message.bytesValue))) + return "bytesValue: buffer expected"; + } + if (message.referenceValue != null && message.hasOwnProperty("referenceValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + if (!$util.isString(message.referenceValue)) + return "referenceValue: string expected"; + } + if (message.geoPointValue != null && message.hasOwnProperty("geoPointValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + let error = $root.google.type.LatLng.verify(message.geoPointValue); + if (error) + return "geoPointValue." + error; + } + } + if (message.arrayValue != null && message.hasOwnProperty("arrayValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + let error = $root.google.events.cloud.firestore.v1.ArrayValue.verify(message.arrayValue); + if (error) + return "arrayValue." 
+ error; + } + } + if (message.mapValue != null && message.hasOwnProperty("mapValue")) { + if (properties.valueType === 1) + return "valueType: multiple values"; + properties.valueType = 1; + { + let error = $root.google.events.cloud.firestore.v1.MapValue.verify(message.mapValue); + if (error) + return "mapValue." + error; + } + } + return null; + }; + + /** + * Creates a Value message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.Value} Value + */ + Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.Value) + return object; + let message = new $root.google.events.cloud.firestore.v1.Value(); + switch (object.nullValue) { + default: + if (typeof object.nullValue === "number") { + message.nullValue = object.nullValue; + break; + } + break; + case "NULL_VALUE": + case 0: + message.nullValue = 0; + break; + } + if (object.booleanValue != null) + message.booleanValue = Boolean(object.booleanValue); + if (object.integerValue != null) + if ($util.Long) + (message.integerValue = $util.Long.fromValue(object.integerValue)).unsigned = false; + else if (typeof object.integerValue === "string") + message.integerValue = parseInt(object.integerValue, 10); + else if (typeof object.integerValue === "number") + message.integerValue = object.integerValue; + else if (typeof object.integerValue === "object") + message.integerValue = new $util.LongBits(object.integerValue.low >>> 0, object.integerValue.high >>> 0).toNumber(); + if (object.doubleValue != null) + message.doubleValue = Number(object.doubleValue); + if (object.timestampValue != null) { + if (typeof object.timestampValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.timestampValue: object expected"); + message.timestampValue = $root.google.protobuf.Timestamp.fromObject(object.timestampValue); + } + if (object.stringValue != null) + message.stringValue = String(object.stringValue); + if (object.bytesValue != null) + if (typeof object.bytesValue === "string") + $util.base64.decode(object.bytesValue, message.bytesValue = $util.newBuffer($util.base64.length(object.bytesValue)), 0); + else if (object.bytesValue.length >= 0) + message.bytesValue = object.bytesValue; + if (object.referenceValue != null) + message.referenceValue = String(object.referenceValue); + if (object.geoPointValue != null) { + if (typeof object.geoPointValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.geoPointValue: object expected"); + message.geoPointValue = $root.google.type.LatLng.fromObject(object.geoPointValue); + } + if (object.arrayValue != null) { + if (typeof object.arrayValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.arrayValue: object expected"); + message.arrayValue = $root.google.events.cloud.firestore.v1.ArrayValue.fromObject(object.arrayValue); + } + if (object.mapValue != null) { + if (typeof object.mapValue !== "object") + throw TypeError(".google.events.cloud.firestore.v1.Value.mapValue: object expected"); + message.mapValue = $root.google.events.cloud.firestore.v1.MapValue.fromObject(object.mapValue); + } + return message; + }; + + /** + * Creates a plain object from a Value message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {google.events.cloud.firestore.v1.Value} message Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (message.booleanValue != null && message.hasOwnProperty("booleanValue")) { + object.booleanValue = message.booleanValue; + if (options.oneofs) + object.valueType = "booleanValue"; + } + if (message.integerValue != null && message.hasOwnProperty("integerValue")) { + if (typeof message.integerValue === "number") + object.integerValue = options.longs === String ? String(message.integerValue) : message.integerValue; + else + object.integerValue = options.longs === String ? $util.Long.prototype.toString.call(message.integerValue) : options.longs === Number ? new $util.LongBits(message.integerValue.low >>> 0, message.integerValue.high >>> 0).toNumber() : message.integerValue; + if (options.oneofs) + object.valueType = "integerValue"; + } + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) { + object.doubleValue = options.json && !isFinite(message.doubleValue) ? String(message.doubleValue) : message.doubleValue; + if (options.oneofs) + object.valueType = "doubleValue"; + } + if (message.referenceValue != null && message.hasOwnProperty("referenceValue")) { + object.referenceValue = message.referenceValue; + if (options.oneofs) + object.valueType = "referenceValue"; + } + if (message.mapValue != null && message.hasOwnProperty("mapValue")) { + object.mapValue = $root.google.events.cloud.firestore.v1.MapValue.toObject(message.mapValue, options); + if (options.oneofs) + object.valueType = "mapValue"; + } + if (message.geoPointValue != null && message.hasOwnProperty("geoPointValue")) { + object.geoPointValue = $root.google.type.LatLng.toObject(message.geoPointValue, options); + if (options.oneofs) + object.valueType = "geoPointValue"; + } + if (message.arrayValue != null && message.hasOwnProperty("arrayValue")) { + object.arrayValue = $root.google.events.cloud.firestore.v1.ArrayValue.toObject(message.arrayValue, options); + if (options.oneofs) + object.valueType = "arrayValue"; + } + if (message.timestampValue != null && message.hasOwnProperty("timestampValue")) { + object.timestampValue = $root.google.protobuf.Timestamp.toObject(message.timestampValue, options); + if (options.oneofs) + object.valueType = "timestampValue"; + } + if (message.nullValue != null && message.hasOwnProperty("nullValue")) { + object.nullValue = options.enums === String ? $root.google.protobuf.NullValue[message.nullValue] === undefined ? message.nullValue : $root.google.protobuf.NullValue[message.nullValue] : message.nullValue; + if (options.oneofs) + object.valueType = "nullValue"; + } + if (message.stringValue != null && message.hasOwnProperty("stringValue")) { + object.stringValue = message.stringValue; + if (options.oneofs) + object.valueType = "stringValue"; + } + if (message.bytesValue != null && message.hasOwnProperty("bytesValue")) { + object.bytesValue = options.bytes === String ? $util.base64.encode(message.bytesValue, 0, message.bytesValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.bytesValue) : message.bytesValue; + if (options.oneofs) + object.valueType = "bytesValue"; + } + return object; + }; + + /** + * Converts this Value to JSON. 
+ * @function toJSON + * @memberof google.events.cloud.firestore.v1.Value + * @instance + * @returns {Object.} JSON object + */ + Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Value + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.Value"; + }; + + return Value; + })(); + + v1.ArrayValue = (function() { + + /** + * Properties of an ArrayValue. + * @memberof google.events.cloud.firestore.v1 + * @interface IArrayValue + * @property {Array.|null} [values] ArrayValue values + */ + + /** + * Constructs a new ArrayValue. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents an ArrayValue. + * @implements IArrayValue + * @constructor + * @param {google.events.cloud.firestore.v1.IArrayValue=} [properties] Properties to set + */ + function ArrayValue(properties) { + this.values = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrayValue values. + * @member {Array.} values + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @instance + */ + ArrayValue.prototype.values = $util.emptyArray; + + /** + * Creates a new ArrayValue instance using the specified properties. + * @function create + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.IArrayValue=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue instance + */ + ArrayValue.create = function create(properties) { + return new ArrayValue(properties); + }; + + /** + * Encodes the specified ArrayValue message. Does not implicitly {@link google.events.cloud.firestore.v1.ArrayValue.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.IArrayValue} message ArrayValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrayValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (let i = 0; i < message.values.length; ++i) + $root.google.events.cloud.firestore.v1.Value.encode(message.values[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ArrayValue message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.ArrayValue.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.IArrayValue} message ArrayValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrayValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrayValue message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrayValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.ArrayValue(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push($root.google.events.cloud.firestore.v1.Value.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrayValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrayValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrayValue message. + * @function verify + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrayValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (let i = 0; i < message.values.length; ++i) { + let error = $root.google.events.cloud.firestore.v1.Value.verify(message.values[i]); + if (error) + return "values." + error; + } + } + return null; + }; + + /** + * Creates an ArrayValue message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.ArrayValue} ArrayValue + */ + ArrayValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.ArrayValue) + return object; + let message = new $root.google.events.cloud.firestore.v1.ArrayValue(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.events.cloud.firestore.v1.ArrayValue.values: array expected"); + message.values = []; + for (let i = 0; i < object.values.length; ++i) { + if (typeof object.values[i] !== "object") + throw TypeError(".google.events.cloud.firestore.v1.ArrayValue.values: object expected"); + message.values[i] = $root.google.events.cloud.firestore.v1.Value.fromObject(object.values[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an ArrayValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {google.events.cloud.firestore.v1.ArrayValue} message ArrayValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrayValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) + object.values = []; + if (message.values && message.values.length) { + object.values = []; + for (let j = 0; j < message.values.length; ++j) + object.values[j] = $root.google.events.cloud.firestore.v1.Value.toObject(message.values[j], options); + } + return object; + }; + + /** + * Converts this ArrayValue to JSON. + * @function toJSON + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @instance + * @returns {Object.} JSON object + */ + ArrayValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrayValue + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.ArrayValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrayValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.ArrayValue"; + }; + + return ArrayValue; + })(); + + v1.MapValue = (function() { + + /** + * Properties of a MapValue. + * @memberof google.events.cloud.firestore.v1 + * @interface IMapValue + * @property {Object.|null} [fields] MapValue fields + */ + + /** + * Constructs a new MapValue. + * @memberof google.events.cloud.firestore.v1 + * @classdesc Represents a MapValue. + * @implements IMapValue + * @constructor + * @param {google.events.cloud.firestore.v1.IMapValue=} [properties] Properties to set + */ + function MapValue(properties) { + this.fields = {}; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MapValue fields. + * @member {Object.} fields + * @memberof google.events.cloud.firestore.v1.MapValue + * @instance + */ + MapValue.prototype.fields = $util.emptyObject; + + /** + * Creates a new MapValue instance using the specified properties. 
+ * @function create + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.IMapValue=} [properties] Properties to set + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue instance + */ + MapValue.create = function create(properties) { + return new MapValue(properties); + }; + + /** + * Encodes the specified MapValue message. Does not implicitly {@link google.events.cloud.firestore.v1.MapValue.verify|verify} messages. + * @function encode + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.IMapValue} message MapValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MapValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) + for (let keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { + writer.uint32(/* id 1, wireType 2 =*/10).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); + $root.google.events.cloud.firestore.v1.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); + } + return writer; + }; + + /** + * Encodes the specified MapValue message, length delimited. Does not implicitly {@link google.events.cloud.firestore.v1.MapValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.IMapValue} message MapValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MapValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MapValue message from the specified reader or buffer. + * @function decode + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MapValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.events.cloud.firestore.v1.MapValue(), key, value; + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (message.fields === $util.emptyObject) + message.fields = {}; + let end2 = reader.uint32() + reader.pos; + key = ""; + value = null; + while (reader.pos < end2) { + let tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = $root.google.events.cloud.firestore.v1.Value.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.fields[key] = value; + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MapValue message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MapValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MapValue message. + * @function verify + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MapValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!$util.isObject(message.fields)) + return "fields: object expected"; + let key = Object.keys(message.fields); + for (let i = 0; i < key.length; ++i) { + let error = $root.google.events.cloud.firestore.v1.Value.verify(message.fields[key[i]]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a MapValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {Object.} object Plain object + * @returns {google.events.cloud.firestore.v1.MapValue} MapValue + */ + MapValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.events.cloud.firestore.v1.MapValue) + return object; + let message = new $root.google.events.cloud.firestore.v1.MapValue(); + if (object.fields) { + if (typeof object.fields !== "object") + throw TypeError(".google.events.cloud.firestore.v1.MapValue.fields: object expected"); + message.fields = {}; + for (let keys = Object.keys(object.fields), i = 0; i < keys.length; ++i) { + if (typeof object.fields[keys[i]] !== "object") + throw TypeError(".google.events.cloud.firestore.v1.MapValue.fields: object expected"); + message.fields[keys[i]] = $root.google.events.cloud.firestore.v1.Value.fromObject(object.fields[keys[i]]); + } + } + return message; + }; + + /** + * Creates a plain object from a MapValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {google.events.cloud.firestore.v1.MapValue} message MapValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MapValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.objects || options.defaults) + object.fields = {}; + let keys2; + if (message.fields && (keys2 = Object.keys(message.fields)).length) { + object.fields = {}; + for (let j = 0; j < keys2.length; ++j) + object.fields[keys2[j]] = $root.google.events.cloud.firestore.v1.Value.toObject(message.fields[keys2[j]], options); + } + return object; + }; + + /** + * Converts this MapValue to JSON. 
+ * @function toJSON + * @memberof google.events.cloud.firestore.v1.MapValue + * @instance + * @returns {Object.} JSON object + */ + MapValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MapValue + * @function getTypeUrl + * @memberof google.events.cloud.firestore.v1.MapValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MapValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.events.cloud.firestore.v1.MapValue"; + }; + + return MapValue; + })(); + + return v1; + })(); + + return firestore; + })(); + + return cloud; + })(); + + return events; + })(); + + google.type = (function() { + + /** + * Namespace type. + * @memberof google + * @namespace + */ + const type = {}; + + type.LatLng = (function() { + + /** + * Properties of a LatLng. + * @memberof google.type + * @interface ILatLng + * @property {number|null} [latitude] LatLng latitude + * @property {number|null} [longitude] LatLng longitude + */ + + /** + * Constructs a new LatLng. + * @memberof google.type + * @classdesc Represents a LatLng. + * @implements ILatLng + * @constructor + * @param {google.type.ILatLng=} [properties] Properties to set + */ + function LatLng(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * LatLng latitude. + * @member {number} latitude + * @memberof google.type.LatLng + * @instance + */ + LatLng.prototype.latitude = 0; + + /** + * LatLng longitude. + * @member {number} longitude + * @memberof google.type.LatLng + * @instance + */ + LatLng.prototype.longitude = 0; + + /** + * Creates a new LatLng instance using the specified properties. + * @function create + * @memberof google.type.LatLng + * @static + * @param {google.type.ILatLng=} [properties] Properties to set + * @returns {google.type.LatLng} LatLng instance + */ + LatLng.create = function create(properties) { + return new LatLng(properties); + }; + + /** + * Encodes the specified LatLng message. Does not implicitly {@link google.type.LatLng.verify|verify} messages. + * @function encode + * @memberof google.type.LatLng + * @static + * @param {google.type.ILatLng} message LatLng message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + LatLng.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.latitude != null && Object.hasOwnProperty.call(message, "latitude")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.latitude); + if (message.longitude != null && Object.hasOwnProperty.call(message, "longitude")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.longitude); + return writer; + }; + + /** + * Encodes the specified LatLng message, length delimited. Does not implicitly {@link google.type.LatLng.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.type.LatLng + * @static + * @param {google.type.ILatLng} message LatLng message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + LatLng.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a LatLng message from the specified reader or buffer. + * @function decode + * @memberof google.type.LatLng + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.type.LatLng} LatLng + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + LatLng.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.type.LatLng(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.latitude = reader.double(); + break; + } + case 2: { + message.longitude = reader.double(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a LatLng message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.type.LatLng + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.type.LatLng} LatLng + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + LatLng.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a LatLng message. + * @function verify + * @memberof google.type.LatLng + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + LatLng.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.latitude != null && message.hasOwnProperty("latitude")) + if (typeof message.latitude !== "number") + return "latitude: number expected"; + if (message.longitude != null && message.hasOwnProperty("longitude")) + if (typeof message.longitude !== "number") + return "longitude: number expected"; + return null; + }; + + /** + * Creates a LatLng message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.type.LatLng + * @static + * @param {Object.} object Plain object + * @returns {google.type.LatLng} LatLng + */ + LatLng.fromObject = function fromObject(object) { + if (object instanceof $root.google.type.LatLng) + return object; + let message = new $root.google.type.LatLng(); + if (object.latitude != null) + message.latitude = Number(object.latitude); + if (object.longitude != null) + message.longitude = Number(object.longitude); + return message; + }; + + /** + * Creates a plain object from a LatLng message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.type.LatLng + * @static + * @param {google.type.LatLng} message LatLng + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + LatLng.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.defaults) { + object.latitude = 0; + object.longitude = 0; + } + if (message.latitude != null && message.hasOwnProperty("latitude")) + object.latitude = options.json && !isFinite(message.latitude) ? String(message.latitude) : message.latitude; + if (message.longitude != null && message.hasOwnProperty("longitude")) + object.longitude = options.json && !isFinite(message.longitude) ? String(message.longitude) : message.longitude; + return object; + }; + + /** + * Converts this LatLng to JSON. + * @function toJSON + * @memberof google.type.LatLng + * @instance + * @returns {Object.} JSON object + */ + LatLng.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for LatLng + * @function getTypeUrl + * @memberof google.type.LatLng + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + LatLng.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.type.LatLng"; + }; + + return LatLng; + })(); + + return type; + })(); + + return google; +})(); + +export { $root as default }; diff --git a/protos/update.sh b/protos/update.sh new file mode 100755 index 000000000..f3fd54ee1 --- /dev/null +++ b/protos/update.sh @@ -0,0 +1,96 @@ +#!/bin/bash + +# The MIT License (MIT) +# +# Copyright (c) 2023 Firebase +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
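+
+# Overview of the steps below:
+#   1. Clone the upstream proto sources (google-cloudevents, googleapis, protobuf) into a temp dir.
+#   2. Copy the protos needed for the Firestore event payloads next to this script.
+#   3. Compile them with pbjs/pbts into CommonJS, ESM, and TypeScript declaration output.
+#   4. Patch the generated ESM output so it imports protobufjs correctly under Node.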
+ +# vars +PROTOS_DIR="$(pwd)" +WORK_DIR=$(mktemp -d) + +# deletes the temp directory on exit +function cleanup { + rm -rf "$WORK_DIR" + echo "Deleted temp working directory $WORK_DIR" + rm -rf "${PROTOS_DIR}/data.proto" "${PROTOS_DIR}/any.proto" "${PROTOS_DIR}/google" + echo "Deleted copied protos" +} + +# register the cleanup function to be called on the EXIT signal +trap cleanup EXIT + +# enter working directory +pushd "$WORK_DIR" + +git clone --depth 1 https://github.com/googleapis/google-cloudevents.git +git clone --depth 1 https://github.com/googleapis/googleapis.git +git clone --depth 1 https://github.com/google/protobuf.git + +# make dirs +mkdir -p "${PROTOS_DIR}/google/type" + +# copy protos +cp google-cloudevents/proto/google/events/cloud/firestore/v1/data.proto \ + "${PROTOS_DIR}/" + +cp protobuf/src/google/protobuf/any.proto \ + "${PROTOS_DIR}/" + +cp protobuf/src/google/protobuf/struct.proto \ + "${PROTOS_DIR}/google/" + +cp protobuf/src/google/protobuf/timestamp.proto \ + "${PROTOS_DIR}/google/" + +cp googleapis/google/type/latlng.proto \ + "${PROTOS_DIR}/google/type/" + +popd + +PBJS="npx pbjs" +PBTS="npx pbts" + +# Generate CommonJS +${PBJS} -t static-module -w commonjs -o compiledFirestore.js \ + data.proto any.proto + +# Generate ESM +${PBJS} -t static-module -w es6 -o compiledFirestore.mjs \ + data.proto any.proto + +# Generate Types +${PBTS} -o compiledFirestore.d.ts compiledFirestore.js +# +# Fix imports for Node ESM in the generated .mjs file. +# See: https://github.com/protobufjs/protobuf.js/issues/1929 +if [[ "$OSTYPE" == "darwin"* ]]; then + # 1. Append .js extension: Node ESM requires full paths for subpath imports not in 'exports'. + sed -i '' 's|protobufjs/minimal|protobufjs/minimal.js|g' compiledFirestore.mjs + # 2. Use default import: protobufjs is CJS. 'import * as' creates a namespace where + # module.exports is under .default. Generated code expects $protobuf to be module.exports directly. + sed -i '' 's|import \* as \$protobuf|import \$protobuf|g' compiledFirestore.mjs +else + # 1. Append .js extension. + sed -i 's|protobufjs/minimal|protobufjs/minimal.js|g' compiledFirestore.mjs + # 2. Use default import. + sed -i 's|import \* as \$protobuf|import \$protobuf|g' compiledFirestore.mjs +fi + diff --git a/scripts/bin-test/extsdks/local/index.d.ts b/scripts/bin-test/extsdks/local/index.d.ts new file mode 100644 index 000000000..620621e2e --- /dev/null +++ b/scripts/bin-test/extsdks/local/index.d.ts @@ -0,0 +1,46 @@ +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester SDK for backfill@0.0.2 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for Local extension. + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. 
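+ *
+ * Usage sketch (hedged): the instance id and param values below simply mirror the
+ * commonjs test fixtures elsewhere in this change; they are not a required convention.
+ *
+ *   const { backfill } = require("@firebase-extensions/local-backfill-sdk");
+ *   exports.extLocal2 = backfill("extLocal2", { DO_BACKFILL: "False", LOCATION: "us-central1" });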
+ */ +export type DoBackfillParam = "True" | "False"; +export type LocationParam = + | "us-central1" + | "us-east1" + | "us-east4" + | "europe-west1" + | "europe-west2" + | "europe-west3" + | "asia-east2" + | "asia-northeast1"; +/** + * Parameters for backfill@0.0.2 extension + */ +export interface BackfillParams { + /** + * Do a backfill + */ + DO_BACKFILL: DoBackfillParam; + /** + * Cloud Functions location + */ + LOCATION: LocationParam; +} +export declare function backfill(instanceId: string, params: BackfillParams): Backfill; +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester + * A tester for the TaskQueue/LCE/RuntimeStatus project + */ +export declare class Backfill { + private instanceId; + private params; + readonly FIREBASE_EXTENSION_LOCAL_PATH = + "./functions/generated/extensions/local/backfill/0.0.2/src"; + constructor(instanceId: string, params: BackfillParams); + getInstanceId(): string; + getParams(): BackfillParams; +} diff --git a/scripts/bin-test/extsdks/local/index.js b/scripts/bin-test/extsdks/local/index.js new file mode 100644 index 000000000..f1f9cce55 --- /dev/null +++ b/scripts/bin-test/extsdks/local/index.js @@ -0,0 +1,30 @@ +"use strict"; +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester SDK for extensions-try-backfill3@0.0.2 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for Local extension. + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.backfill = exports.backfill = void 0; +function backfill(instanceId, params) { + return new Backfill(instanceId, params); +} +exports.backfill = backfill; +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester + * A tester for the TaskQueue/LCE/RuntimeStatus project + */ +class Backfill { + constructor(instanceId, params) { + this.instanceId = instanceId; + this.params = params; + this.FIREBASE_EXTENSION_LOCAL_PATH = "./functions/generated/extensions/local/backfill/0.0.2/src"; + } + getInstanceId() { return this.instanceId; } + getParams() { return this.params; } +} +exports.Backfill = Backfill; diff --git a/scripts/bin-test/extsdks/local/package.json b/scripts/bin-test/extsdks/local/package.json new file mode 100644 index 000000000..700806b3e --- /dev/null +++ b/scripts/bin-test/extsdks/local/package.json @@ -0,0 +1,4 @@ +{ + "name": "@firebase-extensions/local-backfill-sdk", + "main": "./index.js" + } \ No newline at end of file diff --git a/scripts/bin-test/extsdks/translate/index.d.ts b/scripts/bin-test/extsdks/translate/index.d.ts new file mode 100644 index 000000000..ba706712c --- /dev/null +++ b/scripts/bin-test/extsdks/translate/index.d.ts @@ -0,0 +1,169 @@ +/** + * Translate Text in Firestore SDK for firestore-translate-text@0.1.18 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for firestore-translate-text@0.1.18" + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. 
+ */ +import { CloudEvent } from "../../../../v2"; +import { EventarcTriggerOptions } from "../../../../v2/eventarc"; +export type EventCallback = (event: CloudEvent) => unknown | Promise; +export type SimpleEventarcTriggerOptions = Omit< + EventarcTriggerOptions, + "eventType" | "channel" | "region" +>; +export type EventArcRegionType = "us-central1" | "us-west1" | "europe-west4" | "asia-northeast1"; +export type SystemFunctionVpcConnectorEgressSettingsParam = + | "VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED" + | "PRIVATE_RANGES_ONLY" + | "ALL_TRAFFIC"; +export type SystemFunctionIngressSettingsParam = + | "ALLOW_ALL" + | "ALLOW_INTERNAL_ONLY" + | "ALLOW_INTERNAL_AND_GCLB"; +export type SystemFunctionLocationParam = + | "us-central1" + | "us-east1" + | "us-east4" + | "us-west1" + | "us-west2" + | "us-west3" + | "us-west4" + | "europe-central2" + | "europe-west1" + | "europe-west2" + | "europe-west3" + | "europe-west6" + | "asia-east1" + | "asia-east2" + | "asia-northeast1" + | "asia-northeast2" + | "asia-northeast3" + | "asia-south1" + | "asia-southeast1" + | "asia-southeast2" + | "northamerica-northeast1" + | "southamerica-east1" + | "australia-southeast1"; +export type SystemFunctionMemoryParam = "128" | "256" | "512" | "1024" | "2048" | "4096" | "8192"; +/** + * Parameters for firestore-translate-text@0.1.18 extension + */ +export interface FirestoreTranslateTextParams { + /** + * Target languages for translations, as a comma-separated list + */ + LANGUAGES: string; + /** + * Collection path + */ + COLLECTION_PATH: string; + /** + * Input field name + */ + INPUT_FIELD_NAME: string; + /** + * Translations output field name + */ + OUTPUT_FIELD_NAME: string; + /** + * Languages field name + */ + LANGUAGES_FIELD_NAME?: string; + /** + * Event Arc Region + */ + _EVENT_ARC_REGION?: EventArcRegionType; + /** + * Function timeout seconds + */ + _FUNCTION_TIMEOUT_SECONDS?: string; + /** + * VPC Connector + */ + _FUNCTION_VPC_CONNECTOR?: string; + /** + * VPC Connector Egress settings + */ + _FUNCTION_VPC_CONNECTOR_EGRESS_SETTINGS?: SystemFunctionVpcConnectorEgressSettingsParam; + /** + * Minimum function instances + */ + _FUNCTION_MIN_INSTANCES?: string; + /** + * Maximum function instances + */ + _FUNCTION_MAX_INSTANCES?: string; + /** + * Function ingress settings + */ + _FUNCTION_INGRESS_SETTINGS?: SystemFunctionIngressSettingsParam; + /** + * Function labels + */ + _FUNCTION_LABELS?: string; + /** + * KMS key name + */ + _FUNCTION_KMS_KEY_NAME?: string; + /** + * Docker repository + */ + _FUNCTION_DOCKER_REPOSITORY?: string; + /** + * Cloud Functions location + */ + _FUNCTION_LOCATION: SystemFunctionLocationParam; + /** + * Function memory + */ + _FUNCTION_MEMORY?: SystemFunctionMemoryParam; +} +export declare function firestoreTranslateText( + instanceId: string, + params: FirestoreTranslateTextParams +): FirestoreTranslateText; +/** + * Translate Text in Firestore + * Translates strings written to a Cloud Firestore collection into multiple languages (uses Cloud Translation API). + */ +export declare class FirestoreTranslateText { + private instanceId; + private params; + events: string[]; + readonly FIREBASE_EXTENSION_REFERENCE = "firebase/firestore-translate-text@0.1.18"; + readonly EXTENSION_VERSION = "0.1.18"; + constructor(instanceId: string, params: FirestoreTranslateTextParams); + getInstanceId(): string; + getParams(): FirestoreTranslateTextParams; + /** + * Occurs when a trigger has been called within the Extension, and will include data such as the context of the trigger request. 
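+ * @example
+ * // Hedged sketch: the instance id and params mirror the commonjs test fixtures in this change.
+ * const ext = firestoreTranslateText("extRef1", {
+ *   COLLECTION_PATH: "collection1",
+ *   INPUT_FIELD_NAME: "input1",
+ *   LANGUAGES: "de,es",
+ *   OUTPUT_FIELD_NAME: "translated",
+ *   _EVENT_ARC_REGION: "us-central1",
+ *   _FUNCTION_LOCATION: "us-central1",
+ * });
+ * exports.ttOnStart = ext.onStart((event) => console.log("onStart got event", event));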
+ */ + onStart( + callback: EventCallback, + options?: SimpleEventarcTriggerOptions + ): import("firebase-functions/v2").CloudFunction>; + /** + * Occurs when image resizing completes successfully. The event will contain further details about specific formats and sizes. + */ + onSuccess( + callback: EventCallback, + options?: SimpleEventarcTriggerOptions + ): import("firebase-functions/v2").CloudFunction>; + /** + * Occurs when an issue has been experienced in the Extension. This will include any error data that has been included within the Error Exception. + */ + onError( + callback: EventCallback, + options?: SimpleEventarcTriggerOptions + ): import("firebase-functions/v2").CloudFunction>; + /** + * Occurs when the function is settled. Provides no customized data other than the context. + */ + onCompletion( + callback: EventCallback, + options?: SimpleEventarcTriggerOptions + ): import("firebase-functions/v2").CloudFunction>; +} diff --git a/scripts/bin-test/extsdks/translate/index.js b/scripts/bin-test/extsdks/translate/index.js new file mode 100644 index 000000000..6721d13d6 --- /dev/null +++ b/scripts/bin-test/extsdks/translate/index.js @@ -0,0 +1,61 @@ +"use strict"; +/** + * Translate Text in Firestore SDK for firestore-translate-text@0.1.18 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for firestore-translate-text@0.1.18" + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FirestoreTranslateText = exports.firestoreTranslateText = void 0; +const eventarc_1 = require("firebase-functions/v2/eventarc"); +function firestoreTranslateText(instanceId, params) { + return new FirestoreTranslateText(instanceId, params); +} +exports.firestoreTranslateText = firestoreTranslateText; +/** + * Translate Text in Firestore + * Translates strings written to a Cloud Firestore collection into multiple languages (uses Cloud Translation API). + */ +class FirestoreTranslateText { + constructor(instanceId, params) { + this.instanceId = instanceId; + this.params = params; + this.events = []; + this.FIREBASE_EXTENSION_REFERENCE = "firebase/firestore-translate-text@0.1.18"; + this.EXTENSION_VERSION = "0.1.18"; + } + getInstanceId() { return this.instanceId; } + getParams() { return this.params; } + /** + * Occurs when a trigger has been called within the Extension, and will include data such as the context of the trigger request. + */ + onStart(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onStart"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onStart", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } + /** + * Occurs when image resizing completes successfully. The event will contain further details about specific formats and sizes. 
+ */ + onSuccess(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onSuccess"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onSuccess", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } + /** + * Occurs when an issue has been experienced in the Extension. This will include any error data that has been included within the Error Exception. + */ + onError(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onError"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onError", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } + /** + * Occurs when the function is settled. Provides no customized data other than the context. + */ + onCompletion(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onCompletion"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onCompletion", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } +} +exports.FirestoreTranslateText = FirestoreTranslateText; diff --git a/scripts/bin-test/extsdks/translate/package.json b/scripts/bin-test/extsdks/translate/package.json new file mode 100644 index 000000000..964287a7e --- /dev/null +++ b/scripts/bin-test/extsdks/translate/package.json @@ -0,0 +1,4 @@ +{ + "name": "@firebase-extensions/firebase-firestore-translate-text-sdk", + "main": "./index.js" +} \ No newline at end of file diff --git a/scripts/bin-test/mocha-setup.ts b/scripts/bin-test/mocha-setup.ts new file mode 100644 index 000000000..c0c8e7185 --- /dev/null +++ b/scripts/bin-test/mocha-setup.ts @@ -0,0 +1,4 @@ +import chai from "chai"; +import chaiAsPromised from "chai-as-promised"; + +chai.use(chaiAsPromised); diff --git a/scripts/bin-test/run.sh b/scripts/bin-test/run.sh new file mode 100755 index 000000000..dc384233a --- /dev/null +++ b/scripts/bin-test/run.sh @@ -0,0 +1,29 @@ +#!/bin/bash +set -ex # Immediately exit on failure + +# Link the Functions SDK for the testing environment. +if [ "$SKIP_BUILD" != "true" ]; then + npm run build +fi +npm link + +# Link the extensions SDKs for the testing environment. +(cd scripts/bin-test/extsdks/local && npm link) +(cd scripts/bin-test/extsdks/translate && npm link) +(cd scripts/bin-test/extsdks/translate && npm link firebase-functions) + +# Link SDKs to all test sources. 
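+# Each fixture under scripts/bin-test/sources/ resolves the locally built firebase-functions
+# and the two stub extension SDKs via npm link, rather than any published packages.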
+for f in scripts/bin-test/sources/*; do + if [ -d "$f" ]; then + (cd "$f" && npm link firebase-functions) + (cd "$f" && npm link @firebase-extensions/firebase-firestore-translate-text-sdk) + (cd "$f" && npm link @firebase-extensions/local-backfill-sdk) + fi +done + +# Make sure firebase-functions binary is executable +chmod +x ./lib/bin/firebase-functions.js + +mocha \ + --file ./scripts/bin-test/mocha-setup.ts \ + ./scripts/bin-test/test.ts diff --git a/scripts/bin-test/sources/broken-syntax/index.js b/scripts/bin-test/sources/broken-syntax/index.js new file mode 100644 index 000000000..05cbeaa10 --- /dev/null +++ b/scripts/bin-test/sources/broken-syntax/index.js @@ -0,0 +1,6 @@ +const functions = require("firebase-functions"); + +// This will cause a syntax error +exports.broken = functions.https.onRequest((request, response) => { + response.send("Hello from Firebase!" +}); // Missing closing parenthesis \ No newline at end of file diff --git a/scripts/bin-test/sources/broken-syntax/package.json b/scripts/bin-test/sources/broken-syntax/package.json new file mode 100644 index 000000000..bfada79a1 --- /dev/null +++ b/scripts/bin-test/sources/broken-syntax/package.json @@ -0,0 +1,3 @@ +{ + "name": "broken-syntax" +} \ No newline at end of file diff --git a/scripts/bin-test/sources/commonjs-grouped/g1.js b/scripts/bin-test/sources/commonjs-grouped/g1.js new file mode 100644 index 000000000..f204e44d4 --- /dev/null +++ b/scripts/bin-test/sources/commonjs-grouped/g1.js @@ -0,0 +1,9 @@ +const functions = require("firebase-functions/v1"); + +exports.groupedhttp = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.groupedcallable = functions.https.onCall(() => { + return "PASS"; +}); diff --git a/scripts/bin-test/sources/commonjs-grouped/index.js b/scripts/bin-test/sources/commonjs-grouped/index.js new file mode 100644 index 000000000..6d8540915 --- /dev/null +++ b/scripts/bin-test/sources/commonjs-grouped/index.js @@ -0,0 +1,46 @@ +const functions = require("firebase-functions/v1"); +const functionsv2 = require("firebase-functions/v2"); +const firestoreTranslateText = require("@firebase-extensions/firebase-firestore-translate-text-sdk").firestoreTranslateText; +const backfill = require("@firebase-extensions/local-backfill-sdk").backfill; + + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); + +exports.g1 = require("./g1"); diff --git a/scripts/bin-test/sources/commonjs-grouped/package.json b/scripts/bin-test/sources/commonjs-grouped/package.json new file mode 100644 index 
000000000..1ec99f52f --- /dev/null +++ b/scripts/bin-test/sources/commonjs-grouped/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs-grouped" +} diff --git a/scripts/bin-test/sources/commonjs-main/functions.js b/scripts/bin-test/sources/commonjs-main/functions.js new file mode 100644 index 000000000..dd651bf6d --- /dev/null +++ b/scripts/bin-test/sources/commonjs-main/functions.js @@ -0,0 +1,43 @@ +const functions = require("firebase-functions/v1"); +const functionsv2 = require("firebase-functions/v2"); +const firestoreTranslateText = require("@firebase-extensions/firebase-firestore-translate-text-sdk").firestoreTranslateText; +const backfill = require("@firebase-extensions/local-backfill-sdk").backfill; + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); diff --git a/scripts/bin-test/sources/commonjs-main/package.json b/scripts/bin-test/sources/commonjs-main/package.json new file mode 100644 index 000000000..a781259f8 --- /dev/null +++ b/scripts/bin-test/sources/commonjs-main/package.json @@ -0,0 +1,4 @@ +{ + "name": "commonjs-main", + "main": "functions.js" +} diff --git a/scripts/bin-test/sources/commonjs-preserve/index.js b/scripts/bin-test/sources/commonjs-preserve/index.js new file mode 100644 index 000000000..1aa98e117 --- /dev/null +++ b/scripts/bin-test/sources/commonjs-preserve/index.js @@ -0,0 +1,43 @@ +const functions = require("firebase-functions/v1"); +const functionsv2 = require("firebase-functions/v2"); +const firestoreTranslateText = require("@firebase-extensions/firebase-firestore-translate-text-sdk").firestoreTranslateText; +const backfill = require("@firebase-extensions/local-backfill-sdk").backfill; + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1httpPreserve = functions + .runWith({ preserveExternalChanges: true }) + .https.onRequest((req, resp) => { + resp.status(200).send("PASS"); + }); + +functionsv2.setGlobalOptions({ preserveExternalChanges: true }); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, 
null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); \ No newline at end of file diff --git a/scripts/bin-test/sources/commonjs-preserve/package.json b/scripts/bin-test/sources/commonjs-preserve/package.json new file mode 100644 index 000000000..7fdf4e928 --- /dev/null +++ b/scripts/bin-test/sources/commonjs-preserve/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs-preserve" +} diff --git a/scripts/bin-test/sources/commonjs/index.js b/scripts/bin-test/sources/commonjs/index.js new file mode 100644 index 000000000..dd651bf6d --- /dev/null +++ b/scripts/bin-test/sources/commonjs/index.js @@ -0,0 +1,43 @@ +const functions = require("firebase-functions/v1"); +const functionsv2 = require("firebase-functions/v2"); +const firestoreTranslateText = require("@firebase-extensions/firebase-firestore-translate-text-sdk").firestoreTranslateText; +const backfill = require("@firebase-extensions/local-backfill-sdk").backfill; + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); diff --git a/scripts/bin-test/sources/commonjs/package.json b/scripts/bin-test/sources/commonjs/package.json new file mode 100644 index 000000000..30e1b1b27 --- /dev/null +++ b/scripts/bin-test/sources/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs" +} diff --git a/scripts/bin-test/sources/esm-ext/index.mjs b/scripts/bin-test/sources/esm-ext/index.mjs new file mode 100644 index 000000000..8986236b7 --- /dev/null +++ b/scripts/bin-test/sources/esm-ext/index.mjs @@ -0,0 +1,41 @@ +import * as functions from "firebase-functions/v1"; +import * as functionsv2 from "firebase-functions/v2"; +import { firestoreTranslateText } from "@firebase-extensions/firebase-firestore-translate-text-sdk"; +import { backfill } from "@firebase-extensions/local-backfill-sdk"; + +export const v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +export const v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +export const v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +export const v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +export const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", 
+}); + +// A Firebase function defined by extension event +export const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); + +// A Firebase extension by localPath +export const extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); \ No newline at end of file diff --git a/scripts/bin-test/sources/esm-ext/package.json b/scripts/bin-test/sources/esm-ext/package.json new file mode 100644 index 000000000..facb175c2 --- /dev/null +++ b/scripts/bin-test/sources/esm-ext/package.json @@ -0,0 +1,4 @@ +{ + "name": "esm-ext", + "main": "index.mjs" +} diff --git a/scripts/bin-test/sources/esm-main/functions.js b/scripts/bin-test/sources/esm-main/functions.js new file mode 100644 index 000000000..55186cc26 --- /dev/null +++ b/scripts/bin-test/sources/esm-main/functions.js @@ -0,0 +1,41 @@ +import * as functions from "firebase-functions/v1"; +import * as functionsv2 from "firebase-functions/v2"; +import { firestoreTranslateText } from "@firebase-extensions/firebase-firestore-translate-text-sdk"; +import { backfill } from "@firebase-extensions/local-backfill-sdk"; + +export const v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +export const v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +export const v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +export const v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +export const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); + +// A Firebase function defined by extension event +export const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); + +// A Firebase extension by localPath +export const extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); \ No newline at end of file diff --git a/scripts/bin-test/sources/esm-main/package.json b/scripts/bin-test/sources/esm-main/package.json new file mode 100644 index 000000000..6c0840b1d --- /dev/null +++ b/scripts/bin-test/sources/esm-main/package.json @@ -0,0 +1,5 @@ +{ + "name": "esm-main", + "main": "functions.js", + "type": "module" +} diff --git a/scripts/bin-test/sources/esm-top-level-await/exports.js b/scripts/bin-test/sources/esm-top-level-await/exports.js new file mode 100644 index 000000000..50ece433e --- /dev/null +++ b/scripts/bin-test/sources/esm-top-level-await/exports.js @@ -0,0 +1,3 @@ +export const fn = () => { + return null; +} \ No newline at end of file diff --git a/scripts/bin-test/sources/esm-top-level-await/index.js b/scripts/bin-test/sources/esm-top-level-await/index.js new file mode 100644 index 000000000..05d2e5eca --- /dev/null +++ b/scripts/bin-test/sources/esm-top-level-await/index.js @@ -0,0 +1,8 @@ +import * as functionsv2 from "firebase-functions/v2"; + +const { fn } = await import('./exports.js'); + +export const v2http = functionsv2.https.onRequest((req, resp) => { + fn() + resp.status(200).send("PASS"); +}); diff --git a/scripts/bin-test/sources/esm-top-level-await/package.json b/scripts/bin-test/sources/esm-top-level-await/package.json new file mode 100644 index 000000000..9cb65cb9f --- /dev/null +++ 
b/scripts/bin-test/sources/esm-top-level-await/package.json @@ -0,0 +1,4 @@ +{ + "name": "esm", + "type": "module" +} diff --git a/scripts/bin-test/sources/esm/index.js b/scripts/bin-test/sources/esm/index.js new file mode 100644 index 000000000..55186cc26 --- /dev/null +++ b/scripts/bin-test/sources/esm/index.js @@ -0,0 +1,41 @@ +import * as functions from "firebase-functions/v1"; +import * as functionsv2 from "firebase-functions/v2"; +import { firestoreTranslateText } from "@firebase-extensions/firebase-firestore-translate-text-sdk"; +import { backfill } from "@firebase-extensions/local-backfill-sdk"; + +export const v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +export const v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +export const v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +export const v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +export const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); + +// A Firebase function defined by extension event +export const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); + +// A Firebase extension by localPath +export const extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); \ No newline at end of file diff --git a/scripts/bin-test/sources/esm/package.json b/scripts/bin-test/sources/esm/package.json new file mode 100644 index 000000000..9cb65cb9f --- /dev/null +++ b/scripts/bin-test/sources/esm/package.json @@ -0,0 +1,4 @@ +{ + "name": "esm", + "type": "module" +} diff --git a/scripts/bin-test/test.ts b/scripts/bin-test/test.ts new file mode 100644 index 000000000..d24eec5cd --- /dev/null +++ b/scripts/bin-test/test.ts @@ -0,0 +1,433 @@ +import * as subprocess from "child_process"; +import * as path from "path"; +import { promisify } from "util"; +import fs from "fs/promises"; +import * as os from "os"; + +import { expect } from "chai"; +import { parse as parseYaml } from "yaml"; +import fetch from "node-fetch"; +import * as portfinder from "portfinder"; + +const TIMEOUT_XL = 20_000; +const TIMEOUT_L = 10_000; +const TIMEOUT_M = 5_000; +const TIMEOUT_S = 1_000; + +const DEFAULT_OPTIONS = { + availableMemoryMb: null, + maxInstances: null, + minInstances: null, + timeoutSeconds: null, + vpc: null, + serviceAccountEmail: null, + ingressSettings: null, +}; + +const DEFAULT_V1_OPTIONS = { ...DEFAULT_OPTIONS }; + +const DEFAULT_V2_OPTIONS = { ...DEFAULT_OPTIONS, concurrency: null }; + +const BASE_EXTENSIONS = { + extRef1: { + params: { + COLLECTION_PATH: "collection1", + INPUT_FIELD_NAME: "input1", + LANGUAGES: "de,es", + OUTPUT_FIELD_NAME: "translated", + _EVENT_ARC_REGION: "us-central1", + "firebaseextensions.v1beta.function/location": "us-central1", + }, + ref: "firebase/firestore-translate-text@0.1.18", + events: ["firebase.extensions.firestore-translate-text.v1.onStart"], + }, + extLocal2: { + params: { + DO_BACKFILL: "False", + LOCATION: "us-central1", + }, + localPath: "./functions/generated/extensions/local/backfill/0.0.2/src", + events: [], + }, +}; + +const BASE_STACK = { + endpoints: { + v1http: { + ...DEFAULT_V1_OPTIONS, + platform: "gcfv1", + entryPoint: 
"v1http", + httpsTrigger: {}, + }, + v1callable: { + ...DEFAULT_V1_OPTIONS, + platform: "gcfv1", + entryPoint: "v1callable", + labels: {}, + callableTrigger: {}, + }, + v2http: { + ...DEFAULT_V2_OPTIONS, + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + }, + v2callable: { + ...DEFAULT_V2_OPTIONS, + platform: "gcfv2", + entryPoint: "v2callable", + labels: {}, + callableTrigger: {}, + }, + ttOnStart: { + ...DEFAULT_V2_OPTIONS, + platform: "gcfv2", + entryPoint: "ttOnStart", + labels: {}, + region: ["us-central1"], + eventTrigger: { + eventType: "firebase.extensions.firestore-translate-text.v1.onStart", + eventFilters: {}, + retry: false, + channel: "projects/locations/us-central1/channels/firebase", + }, + }, + }, + requiredAPIs: [ + { + api: "eventarcpublishing.googleapis.com", + reason: "Needed for custom event functions", + }, + ], + specVersion: "v1alpha1", + extensions: BASE_EXTENSIONS, +}; + +interface Testcase { + name: string; + modulePath: string; + expected: Record; +} + +interface DiscoveryResult { + success: boolean; + manifest?: Record; + error?: string; +} + +async function retryUntil( + fn: () => Promise, + timeoutMs: number, + sleepMs: number = TIMEOUT_S +) { + const sleep = () => { + return new Promise((resolve) => { + setTimeout(() => resolve(), sleepMs); + }); + }; + const timedOut = new Promise((resolve, reject) => { + setTimeout(() => { + reject(new Error("retry timeout")); + }, timeoutMs); + }); + const retry = (async () => { + for (;;) { + if (await fn()) { + break; + } + await sleep(); + } + })(); + await Promise.race([retry, timedOut]); +} + +async function runHttpDiscovery(modulePath: string): Promise { + const getPort = promisify(portfinder.getPort) as () => Promise; + const port = await getPort(); + + const proc = subprocess.spawn("npx", ["firebase-functions"], { + cwd: path.resolve(modulePath), + env: { + PATH: process.env.PATH, + GCLOUD_PROJECT: "test-project", + PORT: port.toString(), + FUNCTIONS_CONTROL_API: "true", + }, + stdio: "inherit", + }); + + try { + // Wait for server to be ready + await retryUntil(async () => { + try { + await fetch(`http://localhost:${port}/__/functions.yaml`); + return true; + } catch (e: unknown) { + const error = e as { code?: string }; + if (error.code === "ECONNREFUSED") { + // This is an expected error during server startup, so we should retry. + return false; + } + // Any other error is unexpected and should fail the test immediately. 
+ throw e; + } + }, TIMEOUT_L); + + const res = await fetch(`http://localhost:${port}/__/functions.yaml`); + const body = await res.text(); + + if (res.status === 200) { + const manifest = parseYaml(body) as Record; + return { success: true, manifest }; + } else { + return { success: false, error: body }; + } + } finally { + if (proc.pid) { + proc.kill(9); + await new Promise((resolve) => proc.on("exit", resolve)); + } + } +} + +async function runFileDiscovery(modulePath: string): Promise { + const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "firebase-functions-test-")); + const outputPath = path.join(tempDir, "manifest.json"); + + return new Promise((resolve, reject) => { + const proc = subprocess.spawn("npx", ["firebase-functions"], { + cwd: path.resolve(modulePath), + env: { + PATH: process.env.PATH, + GCLOUD_PROJECT: "test-project", + FUNCTIONS_MANIFEST_OUTPUT_PATH: outputPath, + }, + }); + + let stderr = ""; + + proc.stderr?.on("data", (chunk: Buffer) => { + stderr += chunk.toString("utf8"); + process.stderr.write(chunk); + }); + + proc.stdout?.on("data", (chunk: Buffer) => { + process.stdout.write(chunk); + }); + + const timeoutId = setTimeout(async () => { + if (proc.pid) { + proc.kill(9); + await new Promise((resolve) => proc.on("exit", resolve)); + } + resolve({ success: false, error: `File discovery timed out after ${TIMEOUT_M}ms` }); + }, TIMEOUT_M); + + proc.on("close", async (code) => { + clearTimeout(timeoutId); + + if (code === 0) { + try { + const manifestJson = await fs.readFile(outputPath, "utf8"); + const manifest = JSON.parse(manifestJson) as Record; + await fs.rm(tempDir, { recursive: true }).catch(() => { + // Ignore errors + }); + resolve({ success: true, manifest }); + } catch (e) { + resolve({ success: false, error: `Failed to read manifest file: ${e}` }); + } + } else { + const errorLines = stderr.split("\n").filter((line) => line.trim()); + const errorMessage = errorLines.join(" ") || "No error message found"; + resolve({ success: false, error: errorMessage }); + } + }); + + proc.on("error", (err) => { + clearTimeout(timeoutId); + // Clean up temp directory on error + fs.rm(tempDir, { recursive: true }).catch(() => { + // Ignore errors + }); + reject(err); + }); + }); +} + +describe("functions.yaml", function () { + // eslint-disable-next-line @typescript-eslint/no-invalid-this + this.timeout(TIMEOUT_XL); + + const discoveryMethods = [ + { name: "http", fn: runHttpDiscovery }, + { name: "file", fn: runFileDiscovery }, + ]; + + function runDiscoveryTests( + tc: Testcase, + discoveryFn: (path: string) => Promise + ) { + it("returns expected manifest", async function () { + // eslint-disable-next-line @typescript-eslint/no-invalid-this + this.timeout(TIMEOUT_M); + + const result = await discoveryFn(tc.modulePath); + expect(result.success).to.be.true; + expect(result.manifest).to.deep.equal(tc.expected); + }); + } + + describe("commonjs", function () { + // eslint-disable-next-line @typescript-eslint/no-invalid-this + this.timeout(TIMEOUT_L); + + const testcases: Testcase[] = [ + { + name: "basic", + modulePath: "./scripts/bin-test/sources/commonjs", + expected: BASE_STACK, + }, + { + name: "has main", + modulePath: "./scripts/bin-test/sources/commonjs-main", + expected: BASE_STACK, + }, + { + name: "grouped", + modulePath: "./scripts/bin-test/sources/commonjs-grouped", + expected: { + ...BASE_STACK, + endpoints: { + ...BASE_STACK.endpoints, + "g1-groupedhttp": { + ...DEFAULT_V1_OPTIONS, + platform: "gcfv1", + entryPoint: "g1.groupedhttp", + httpsTrigger: {}, + 
}, + "g1-groupedcallable": { + ...DEFAULT_V1_OPTIONS, + platform: "gcfv1", + entryPoint: "g1.groupedcallable", + labels: {}, + callableTrigger: {}, + }, + }, + }, + }, + { + name: "preserveChange", + modulePath: "./scripts/bin-test/sources/commonjs-preserve", + expected: { + endpoints: { + v1http: { + ...DEFAULT_V1_OPTIONS, + platform: "gcfv1", + entryPoint: "v1http", + httpsTrigger: {}, + }, + v1httpPreserve: { + platform: "gcfv1", + entryPoint: "v1httpPreserve", + httpsTrigger: {}, + }, + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + }, + ttOnStart: { + platform: "gcfv2", + entryPoint: "ttOnStart", + labels: {}, + region: ["us-central1"], + eventTrigger: { + eventType: "firebase.extensions.firestore-translate-text.v1.onStart", + eventFilters: {}, + retry: false, + channel: "projects/locations/us-central1/channels/firebase", + }, + }, + }, + requiredAPIs: [ + { + api: "eventarcpublishing.googleapis.com", + reason: "Needed for custom event functions", + }, + ], + specVersion: "v1alpha1", + extensions: BASE_EXTENSIONS, + }, + }, + ]; + + for (const tc of testcases) { + describe(tc.name, () => { + for (const discovery of discoveryMethods) { + describe(`${discovery.name} discovery`, () => { + runDiscoveryTests(tc, discovery.fn); + }); + } + }); + } + }); + + describe("esm", function () { + // eslint-disable-next-line @typescript-eslint/no-invalid-this + this.timeout(TIMEOUT_L); + + const testcases: Testcase[] = [ + { + name: "basic", + modulePath: "./scripts/bin-test/sources/esm", + expected: BASE_STACK, + }, + { + name: "with main", + + modulePath: "./scripts/bin-test/sources/esm-main", + expected: BASE_STACK, + }, + { + name: "with .m extension", + modulePath: "./scripts/bin-test/sources/esm-ext", + expected: BASE_STACK, + }, + ]; + + for (const tc of testcases) { + describe(tc.name, () => { + for (const discovery of discoveryMethods) { + describe(`${discovery.name} discovery`, () => { + runDiscoveryTests(tc, discovery.fn); + }); + } + }); + } + }); + + describe("error handling", () => { + const errorTestcases = [ + { + name: "broken syntax", + modulePath: "./scripts/bin-test/sources/broken-syntax", + expectedError: "missing ) after argument list", + }, + ]; + + for (const tc of errorTestcases) { + describe(tc.name, () => { + for (const discovery of discoveryMethods) { + it(`${discovery.name} discovery handles error correctly`, async () => { + const result = await discovery.fn(tc.modulePath); + expect(result.success).to.be.false; + expect(result.error).to.include(tc.expectedError); + }); + } + }); + } + }); +}); diff --git a/scripts/fetch-regions b/scripts/fetch-regions new file mode 100755 index 000000000..d529cdf9b --- /dev/null +++ b/scripts/fetch-regions @@ -0,0 +1,7 @@ +#!/bin/bash + +if [ -z $1 ]; then + echo "Must provide a project id as first argument." && exit 1 +fi; + +gcloud functions regions list --project $1 --format=json | jq 'map(.locationId)' \ No newline at end of file diff --git a/scripts/publish-container/Dockerfile b/scripts/publish-container/Dockerfile new file mode 100644 index 000000000..435964588 --- /dev/null +++ b/scripts/publish-container/Dockerfile @@ -0,0 +1,12 @@ +FROM node:22.21.1 + +# Install dependencies +RUN apt-get update && \ + apt-get install -y curl git jq + +# Install npm at latest. 
+RUN npm install --global npm@latest + +# Install hub +RUN curl -fsSL --output hub.tgz https://github.com/github/hub/releases/download/v2.13.0/hub-linux-amd64-2.13.0.tgz +RUN tar --strip-components=2 -C /usr/bin -xf hub.tgz hub-linux-amd64-2.13.0/bin/hub diff --git a/scripts/publish-container/cloudbuild.yaml b/scripts/publish-container/cloudbuild.yaml new file mode 100644 index 000000000..da60d1233 --- /dev/null +++ b/scripts/publish-container/cloudbuild.yaml @@ -0,0 +1,4 @@ +steps: + - name: "gcr.io/cloud-builders/docker" + args: ["build", "-t", "gcr.io/$PROJECT_ID/package-builder", "."] +images: ["gcr.io/$PROJECT_ID/package-builder"] diff --git a/scripts/publish.sh b/scripts/publish.sh new file mode 100755 index 000000000..71cb0f33f --- /dev/null +++ b/scripts/publish.sh @@ -0,0 +1,160 @@ +#!/bin/bash +set -e + +printusage() { + echo "publish.sh " + echo "REPOSITORY_ORG and REPOSITORY_NAME should be set in the environment." + echo "e.g. REPOSITORY_ORG=user, REPOSITORY_NAME=repo" + echo "" + echo "Arguments:" + echo " version: 'patch', 'minor', or 'major'." +} + +VERSION=$1 +if [[ $VERSION == "" ]]; then + printusage + exit 1 +elif [[ ! ($VERSION == "patch" || $VERSION == "minor" || $VERSION == "major" || $VERSION == "prerelease") ]]; then + printusage + exit 1 +fi + +if [[ $REPOSITORY_ORG == "" ]]; then + printusage + exit 1 +fi +if [[ $REPOSITORY_NAME == "" ]]; then + printusage + exit 1 +fi + +WDIR=$(pwd) + +echo "Checking for commands..." +trap "echo 'Missing hub.'; exit 1" ERR +which hub &> /dev/null +trap - ERR + +trap "echo 'Missing node.'; exit 1" ERR +which node &> /dev/null +trap - ERR + +trap "echo 'Missing jq.'; exit 1" ERR +which jq &> /dev/null +trap - ERR +echo "Checked for commands." + +echo "Checking for Twitter credentials..." +trap "echo 'Missing Twitter credentials.'; exit 1" ERR +test -f "${WDIR}/scripts/twitter.json" +trap - ERR +echo "Checked for Twitter credentials..." + +echo "Checking for logged-in npm user..." +trap "echo 'Please login to npm using \`npm login --registry https://wombat-dressing-room.appspot.com\`'; exit 1" ERR +npm whoami --registry https://wombat-dressing-room.appspot.com +trap - ERR +echo "Checked for logged-in npm user." + +echo "Moving to temporary directory.." +TEMPDIR=$(mktemp -d) +echo "[DEBUG] ${TEMPDIR}" +cd "${TEMPDIR}" +echo "Moved to temporary directory." + +echo "Cloning repository..." +git clone "git@github.com:${REPOSITORY_ORG}/${REPOSITORY_NAME}.git" +cd "${REPOSITORY_NAME}" +echo "Cloned repository." + +echo "Making sure there is a changelog..." +if [ ! -s CHANGELOG.md ]; then + echo "CHANGELOG.md is empty. aborting." + exit 1 +fi +echo "Made sure there is a changelog." + +echo "Running npm ci..." +npm ci +echo "Ran npm ci." + +echo "Running tests..." +npm test +npm run test:bin +echo "Ran tests." + +echo "Running publish build..." +npm run build +echo "Ran publish build." + +echo "Making a $VERSION version..." +if [[ $PRE_RELEASE != "" ]]; then + if [[ $VERSION == "prerelease" ]]; then + npm version prerelease --preid=rc + else + npm version pre$VERSION --preid=rc + fi +else + npm version $VERSION +fi +NEW_VERSION=$(jq -r ".version" package.json) +echo "Made a $NEW_VERSION version." + +echo "Making the release notes..." +RELEASE_NOTES_FILE=$(mktemp) +echo "[DEBUG] ${RELEASE_NOTES_FILE}" +echo "v${NEW_VERSION}" >> "${RELEASE_NOTES_FILE}" +echo "" >> "${RELEASE_NOTES_FILE}" +cat CHANGELOG.md >> "${RELEASE_NOTES_FILE}" +echo "Made the release notes." + +echo "Publishing to npm..." 
+PUBLISH_ARGS=() +if [[ -n "$DRY_RUN" ]]; then + echo "DRY RUN: running publish with --dry-run" + PUBLISH_ARGS+=(--dry-run) +fi + +if [[ -n "$PRE_RELEASE" ]]; then + PUBLISH_ARGS+=(--tag next) +fi + +npm publish "${PUBLISH_ARGS[@]}" +echo "Published to npm." + +echo "Pushing to GitHub..." +git push origin master --tags +echo "Pushed to GitHub." + +if [[ $PRE_RELEASE != "" ]]; then + echo "Published a pre-release version. Skipping post-release actions." + exit +fi + +if [[ $DRY_RUN != "" ]]; then + echo "All other commands are mutations, and we are doing a dry run." + echo "Terminating." + exit +fi + +echo "Cleaning up release notes..." +rm CHANGELOG.md +touch CHANGELOG.md +git commit -m "[firebase-release] Removed change log and reset repo after ${NEW_VERSION} release" CHANGELOG.md +echo "Cleaned up release notes." + +echo "Pushing to GitHub..." +# Push the changelog cleanup commit. +git push origin master --tags +echo "Pushed to GitHub." + +echo "Publishing release notes..." +hub release create --file "${RELEASE_NOTES_FILE}" "v${NEW_VERSION}" +echo "Published release notes." + +# Temporarily disable Twitter integration +#echo "Making the tweet..." +#npm install --no-save twitter@1.7.1 +#cp -v "${WDIR}/scripts/twitter.json" "${TEMPDIR}/${REPOSITORY_NAME}/scripts/" +#node ./scripts/tweet.js ${NEW_VERSION} +#echo "Made the tweet." diff --git a/scripts/publish/cloudbuild.yaml b/scripts/publish/cloudbuild.yaml new file mode 100644 index 000000000..110689cf6 --- /dev/null +++ b/scripts/publish/cloudbuild.yaml @@ -0,0 +1,116 @@ +steps: + # Decrypt the SSH key. + - name: "gcr.io/cloud-builders/gcloud" + args: + [ + "kms", + "decrypt", + "--ciphertext-file=deploy_key.enc", + "--plaintext-file=/root/.ssh/id_rsa", + "--location=global", + "--keyring=${_KEY_RING}", + "--key=${_KEY_NAME}", + ] + + # Decrypt the Twitter credentials. + - name: "gcr.io/cloud-builders/gcloud" + args: + [ + "kms", + "decrypt", + "--ciphertext-file=twitter.json.enc", + "--plaintext-file=twitter.json", + "--location=global", + "--keyring=${_KEY_RING}", + "--key=${_KEY_NAME}", + ] + + # Decrypt the npm credentials. + - name: "gcr.io/cloud-builders/gcloud" + args: + [ + "kms", + "decrypt", + "--ciphertext-file=npmrc.enc", + "--plaintext-file=npmrc", + "--location=global", + "--keyring=${_KEY_RING}", + "--key=${_KEY_NAME}", + ] + + # Decrypt the hub (GitHub) credentials. + - name: "gcr.io/cloud-builders/gcloud" + args: + [ + "kms", + "decrypt", + "--ciphertext-file=hub.enc", + "--plaintext-file=hub", + "--location=global", + "--keyring=${_KEY_RING}", + "--key=${_KEY_NAME}", + ] + + # Set up git with key and domain. + - name: "gcr.io/cloud-builders/git" + entrypoint: "bash" + args: + - "-c" + - | + chmod 600 /root/.ssh/id_rsa + cat </root/.ssh/config + Hostname github.com + IdentityFile /root/.ssh/id_rsa + EOF + ssh-keyscan github.com >> /root/.ssh/known_hosts + + # Clone the repository. + - name: "gcr.io/cloud-builders/git" + args: ["clone", "git@github.com:${_REPOSITORY_ORG}/${_REPOSITORY_NAME}"] + + # Set up the Git configuration. + - name: "gcr.io/cloud-builders/git" + dir: "${_REPOSITORY_NAME}" + args: ["config", "--global", "user.email", "firebase-oss-bot@google.com"] + - name: "gcr.io/cloud-builders/git" + dir: "${_REPOSITORY_NAME}" + args: ["config", "--global", "user.name", "Google Open Source Bot"] + + # Set up the Twitter credentials. 
+ - name: "gcr.io/$PROJECT_ID/package-builder" + entrypoint: "cp" + args: ["-v", "twitter.json", "${_REPOSITORY_NAME}/scripts/twitter.json"] + + # Set up the npm credentials. + - name: "gcr.io/$PROJECT_ID/package-builder" + entrypoint: "bash" + args: ["-c", "cp -v npmrc ~/.npmrc"] + + # Set up the hub credentials for package-builder. + - name: "gcr.io/$PROJECT_ID/package-builder" + entrypoint: "bash" + args: ["-c", "mkdir -vp ~/.config && cp -v hub ~/.config/hub"] + + # Publish the package. + - name: "gcr.io/$PROJECT_ID/package-builder" + dir: "${_REPOSITORY_NAME}" + args: ["bash", "./scripts/publish.sh", "${_VERSION}"] + env: + - "REPOSITORY_ORG=${_REPOSITORY_ORG}" + - "REPOSITORY_NAME=${_REPOSITORY_NAME}" + - "DRY_RUN=${_DRY_RUN}" + - "PRE_RELEASE=${_PRE_RELEASE}" + +options: + volumes: + - name: "ssh" + path: /root/.ssh + +substitutions: + _VERSION: "" + _PRE_RELEASE: "" + _DRY_RUN: "" + _KEY_RING: "npm-publish-keyring" + _KEY_NAME: "publish" + _REPOSITORY_ORG: "firebase" + _REPOSITORY_NAME: "firebase-functions" diff --git a/scripts/publish/deploy_key.enc b/scripts/publish/deploy_key.enc new file mode 100644 index 000000000..127551f08 Binary files /dev/null and b/scripts/publish/deploy_key.enc differ diff --git a/scripts/publish/hub.enc b/scripts/publish/hub.enc new file mode 100644 index 000000000..a20ac3378 Binary files /dev/null and b/scripts/publish/hub.enc differ diff --git a/scripts/publish/npmrc.enc b/scripts/publish/npmrc.enc new file mode 100644 index 000000000..da8ea49bb Binary files /dev/null and b/scripts/publish/npmrc.enc differ diff --git a/scripts/publish/twitter.json.enc b/scripts/publish/twitter.json.enc new file mode 100644 index 000000000..82123a04d Binary files /dev/null and b/scripts/publish/twitter.json.enc differ diff --git a/scripts/test-packaging.sh b/scripts/test-packaging.sh new file mode 100755 index 000000000..82a2b1671 --- /dev/null +++ b/scripts/test-packaging.sh @@ -0,0 +1,47 @@ +#!/bin/bash +set -eux + +# Argument 1: Path to a pre-built tarball. +# If not provided, the script will run 'npm run build' and 'npm pack' locally. +PREBUILT_TARBALL="$1" + +# Setup cleanup +WORK_DIR=$(mktemp -d) +function cleanup { + rm -rf "$WORK_DIR" + echo "Deleted temp working directory $WORK_DIR" +} +trap cleanup EXIT + +# Save current directory to resolve relative paths later +START_DIR="$(pwd)" +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +if [ -n "$PREBUILT_TARBALL" ]; then + echo "Using prebuilt tarball: $PREBUILT_TARBALL" + # Resolve absolute path if it's relative + if [[ "$PREBUILT_TARBALL" != /* ]]; then + PREBUILT_TARBALL="$START_DIR/$PREBUILT_TARBALL" + fi + TARBALL_PATH="$PREBUILT_TARBALL" +else + echo "Building project..." + cd "$SCRIPT_DIR/.." + npm run build + + echo "Packing project..." + TARBALL=$(npm pack) + mv "$TARBALL" "$WORK_DIR/" + TARBALL_PATH="$WORK_DIR/$TARBALL" +fi + +echo "Setting up test project in $WORK_DIR..." +pushd "$WORK_DIR" > /dev/null +npm init -y > /dev/null +npm install "$TARBALL_PATH" + +echo "Running verification script..." +cp "$SCRIPT_DIR/verify-exports.mjs" . +node verify-exports.mjs + +popd > /dev/null diff --git a/scripts/tweet.js b/scripts/tweet.js new file mode 100644 index 000000000..be6229574 --- /dev/null +++ b/scripts/tweet.js @@ -0,0 +1,52 @@ +"use strict"; + +const fs = require("fs"); +const Twitter = require("twitter"); + +function printUsage() { + console.error( + ` +Usage: tweet.js + +Credentials must be stored in "twitter.json" in this directory. 
+ +Arguments: + - version: Version of module that was released. e.g. "1.2.3" +` + ); + process.exit(1); +} + +function getUrl(version) { + return `https://github.com/firebase/firebase-functions/releases/tag/v${version}`; +} + +if (process.argv.length !== 3) { + console.error("Missing arguments."); + printUsage(); +} + +const version = process.argv.pop(); +if (!version.match(/^\d+\.\d+\.\d+$/)) { + console.error(`Version "${version}" not a version number.`); + printUsage(); +} + +if (!fs.existsSync(`${__dirname}/twitter.json`)) { + console.error("Missing credentials."); + printUsage(); +} +const creds = require("./twitter.json"); + +const client = new Twitter(creds); + +client.post( + "statuses/update", + { status: `v${version} of @Firebase SDK for Cloud Functions is available. Release notes: ${getUrl(version)}` }, + (err) => { + if (err) { + console.error(err); + process.exit(1); + } + } +); diff --git a/scripts/verify-exports.mjs b/scripts/verify-exports.mjs new file mode 100644 index 000000000..df4d1d876 --- /dev/null +++ b/scripts/verify-exports.mjs @@ -0,0 +1,54 @@ +import fs from 'fs'; +import path from 'path'; +import { createRequire } from 'module'; + +const require = createRequire(import.meta.url); + +// Read the package.json of the INSTALLED package to verify what was actually packed +const pkgPath = path.resolve(process.cwd(), 'node_modules/firebase-functions/package.json'); +if (!fs.existsSync(pkgPath)) { + console.error(`❌ Could not find installed package at ${pkgPath}`); + process.exit(1); +} + +const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8')); +const exports = Object.keys(pkg.exports || {}); + +// Filter out non-code entrypoints (e.g. package.json if it were exported) +const entryPoints = exports.filter(e => !e.endsWith('.json')); + +console.log(`Found ${entryPoints.length} entry points to verify.`); + +let hasError = false; + +async function verify() { + console.log('\n--- Verifying Entry Points (CJS & ESM) ---'); + for (const exp of entryPoints) { + const importPath = exp === '.' ? 'firebase-functions' : `firebase-functions/${exp.replace('./', '')}`; + + try { + require(importPath); + console.log(`✅ CJS: ${importPath}`); + } catch (e) { + console.error(`❌ CJS Failed: ${importPath}`, e.message); + hasError = true; + } + + try { + await import(importPath); + console.log(`✅ ESM: ${importPath}`); + } catch (e) { + console.error(`❌ ESM Failed: ${importPath}`, e.message); + hasError = true; + } + } + + if (hasError) { + console.error('\n❌ Verification failed with errors.'); + process.exit(1); + } else { + console.log('\n✨ All entry points verified successfully!'); + } +} + +verify(); diff --git a/spec/apps.spec.ts b/spec/apps.spec.ts deleted file mode 100644 index 20c654cd0..000000000 --- a/spec/apps.spec.ts +++ /dev/null @@ -1,142 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { expect } from 'chai'; -import { apps as appsNamespace } from '../src/apps'; -import * as firebase from 'firebase-admin'; -import * as _ from 'lodash'; -import * as sinon from 'sinon'; - -describe('apps', () => { - let apps: appsNamespace.Apps; - let claims; - beforeEach(() => { - apps = new appsNamespace.Apps(); - // mock claims intentionally contains dots, square brackets, and nested paths - claims = {'token': {'firebase': {'identities':{'google.com':['111']}}}}; - }); - - afterEach(() => { - _.forEach(firebase.apps, app => { - app.delete(); - }); - }); - - describe('retain/release', () => { - let clock: sinon.SinonFakeTimers; - - beforeEach(() => { - clock = sinon.useFakeTimers(); - }); - - afterEach(() => { - clock.restore(); - }); - - it('should retain/release ref counters appropriately', function() { - apps.retain(); - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 1, - }); - apps.release(); - clock.tick(appsNamespace.garbageCollectionInterval); - return Promise.resolve().then(() => { - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 0, - }); - }); - }); - - it('should only decrement counter after garbageCollectionInterval is up', function() { - apps.retain(); - apps.release(); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 1, - }); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - return Promise.resolve().then(() => { - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 0, - }); - }); - }); - - it('should call _destroyApp if app no longer used', function() { - let spy = sinon.spy(apps, '_destroyApp'); - apps.retain(); - apps.release(); - clock.tick(appsNamespace.garbageCollectionInterval); - return Promise.resolve().then(() => { - expect(spy.called).to.be.true; - }); - }); - - it('should not call _destroyApp if app used again while waiting for release', function() { - let spy = sinon.spy(apps, '_destroyApp'); - apps.retain(); - apps.release(); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - apps.retain(); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - return Promise.resolve().then(() => { - expect(spy.called).to.be.false; - }); - }); - - it('should increment ref counter for each subsequent retain', function() { - apps.retain(); - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 1, - }); - apps.retain(); - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 2, - }); - apps.retain(); - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 3, - }); - }); - - it('should work with staggering sets of retain/release', function() { - apps.retain(); - apps.release(); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - apps.retain(); - apps.release(); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - return Promise.resolve().then(() => { - // Counters are still 1 due second set of retain/release - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 1, - }); - clock.tick(appsNamespace.garbageCollectionInterval / 2); - 
}).then(() => { - // It's now been a full interval since the second set of retain/release - expect(apps['_refCounter']).to.deep.equal({ - __admin__: 0, - }); - }); - }); - }); -}); diff --git a/spec/cloud-functions.spec.ts b/spec/cloud-functions.spec.ts deleted file mode 100644 index 7566ea494..000000000 --- a/spec/cloud-functions.spec.ts +++ /dev/null @@ -1,302 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import * as _ from 'lodash'; -import { expect } from 'chai'; -import { Event, EventContext, LegacyEvent, - makeCloudFunction, MakeCloudFunctionArgs, Change } from '../src/cloud-functions'; - -describe('makeCloudFunction', () => { - const cloudFunctionArgs: MakeCloudFunctionArgs = { - provider: 'mock.provider', - eventType: 'mock.event', - service: 'service', - triggerResource: () => 'resource', - handler: () => null, - }; - - it('should put a __trigger on the returned CloudFunction', () => { - let cf = makeCloudFunction(cloudFunctionArgs); - expect(cf.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'mock.provider.mock.event', - resource: 'resource', - service: 'service', - }, - }); - }); - - it('should construct the right context for legacy event format', () => { - let args: any = _.assign({}, cloudFunctionArgs, {handler: (data: any, context: EventContext) => context}); - let cf = makeCloudFunction(args); - let test: LegacyEvent = { - eventId: '00000', - timestamp: '2016-11-04T21:29:03.496Z', - eventType: 'providers/provider/eventTypes/event', - resource: 'resource', - data: 'data', - }; - - return expect(cf(test)).to.eventually.deep.equal({ - eventId: '00000', - timestamp: '2016-11-04T21:29:03.496Z', - eventType: 'mock.provider.mock.event', - resource: { - service: 'service', - name: 'resource', - }, - params: {}, - }); - }); - - it('should construct the right context for new event format', () => { - let args: any = _.assign({}, cloudFunctionArgs, { handler: (data: any, context: EventContext) => context }); - let cf = makeCloudFunction(args); - let test: Event = { - context: { - eventId: '00000', - timestamp: '2016-11-04T21:29:03.496Z', - eventType: 'provider.event', - resource: { - service: 'provider', - name: 'resource', - }, - }, - data: 'data', - }; - - return expect(cf(test)).to.eventually.deep.equal({ - eventId: '00000', - timestamp: '2016-11-04T21:29:03.496Z', - eventType: 'provider.event', - resource: { - service: 'provider', - name: 'resource', - }, - params: {}, - }); 
- }); -}); - -describe('makeParams', () => { - const args: MakeCloudFunctionArgs = { - provider: 'provider', - eventType: 'event', - service: 'service', - triggerResource: () => 'projects/_/instances/pid/ref/{foo}/nested/{bar}', - handler: (data, context) => context.params, - }; - const cf = makeCloudFunction(args); - - it('should construct params from the event resource of legacy events', () => { - const testEvent: LegacyEvent = { - resource: 'projects/_/instances/pid/ref/a/nested/b', - eventType: 'event', - data: 'data', - }; - - return expect(cf(testEvent)).to.eventually.deep.equal({ - foo: 'a', - bar: 'b', - }); - }); - - it('should construct params from the event resource of new format events', () => { - const testEvent: Event = { - context: { - eventId: '111', - timestamp: '2016-11-04T21:29:03.496Z', - resource: { - service: 'service', - name: 'projects/_/instances/pid/ref/a/nested/b', - }, - eventType: 'event', - }, - data: 'data', - }; - - return expect(cf(testEvent)).to.eventually.deep.equal({ - foo: 'a', - bar: 'b', - }); - }); -}); - -describe('makeAuth and makeAuthType', () => { - const args: MakeCloudFunctionArgs = { - provider: 'google.firebase.database', - eventType: 'event', - service: 'service', - triggerResource: () => 'projects/_/instances/pid/ref/{foo}/nested/{bar}', - handler: (data, context) => { - return { - auth: context.auth, - authMode: context.authType, - }; - }, - }; - let cf = makeCloudFunction(args); - - it('should construct correct auth and authType for admin user', () => { - const testEvent: LegacyEvent = { - data: 'data', - auth: { - admin: true, - }, - }; - - return expect(cf(testEvent)).to.eventually.deep.equal({ - auth: undefined, - authMode: 'ADMIN', - }); - }); - - it('should construct correct auth and authType for unauthenticated user', () => { - const testEvent: LegacyEvent = { - data: 'data', - auth: { - admin: false, - }, - }; - - return expect(cf(testEvent)).to.eventually.deep.equal({ - auth: null, - authMode: 'UNAUTHENTICATED', - }); - }); - - it('should construct correct auth and authType for a user', () => { - const testEvent: LegacyEvent = { - data: 'data', - auth: { - admin: false, - variable: { - uid: 'user', - provider: 'google', - token: { - sub: 'user', - }, - }, - }, - }; - - return expect(cf(testEvent)).to.eventually.deep.equal({ - auth: { - uid: 'user', - token: { - sub: 'user', - }, - }, - authMode: 'USER', - }); - }); -}); - -describe('Change', () => { - describe('applyFieldMask', () => { - const after = { - foo: 'bar', - num: 2, - obj: { - a: 1, - b: 2, - }, - }; - - it('should handle deleted values', () => { - const sparseBefore = { baz: 'qux' }; - const fieldMask = 'baz'; - expect(Change.applyFieldMask(sparseBefore, after, fieldMask)).to.deep.equal( { - foo: 'bar', - num: 2, - obj: { - a: 1, - b: 2, - }, - baz: 'qux', - }); - }); - - it('should handle created values', () => { - const sparseBefore = {}; - const fieldMask = 'num,obj.a'; - expect(Change.applyFieldMask(sparseBefore, after, fieldMask)).to.deep.equal({ - foo: 'bar', - obj: { - b: 2, - }, - }); - }); - - it('should handle mutated values', () => { - const sparseBefore = { - num: 3, - obj: { - a: 3, - }, - }; - const fieldMask = 'num,obj.a'; - expect(Change.applyFieldMask(sparseBefore, after, fieldMask)).to.deep.equal({ - foo: 'bar', - num: 3, - obj: { - a: 3, - b: 2, - }, - }); - }); - }); - - describe('fromJSON', () => { - it('should create a Change object with a `before` and `after`', () => { - let created = Change.fromJSON({ - before: { foo: 'bar' }, - after: { foo: 
'faz' }, - }); - expect(created instanceof Change).to.equal(true); - expect(created.before).to.deep.equal({ foo: 'bar' }); - expect(created.after).to.deep.equal({ foo: 'faz' }); - }); - - it('should apply the customizer function to `before` and `after`', () => { - function customizer(input: any) { - _.set(input, 'another', 'value'); - return input as T; - } - let created = Change.fromJSON( - { - before: { foo: 'bar' }, - after: { foo: 'faz' }, - }, - customizer, - ); - expect(created.before).to.deep.equal({ - foo: 'bar', - another: 'value', - }); - expect(created.after).to.deep.equal({ - foo: 'faz', - another: 'value', - }); - }); - }); -}); diff --git a/spec/common/change.spec.ts b/spec/common/change.spec.ts new file mode 100644 index 000000000..661188320 --- /dev/null +++ b/spec/common/change.spec.ts @@ -0,0 +1,114 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; +import * as change from "../../src/common/change"; + +describe("Change", () => { + describe("applyFieldMask", () => { + const after = { + foo: "bar", + num: 2, + obj: { + a: 1, + b: 2, + }, + }; + + it("should handle deleted values", () => { + const sparseBefore = { baz: "qux" }; + const fieldMask = "baz"; + expect(change.applyFieldMask(sparseBefore, after, fieldMask)).to.deep.equal({ + foo: "bar", + num: 2, + obj: { + a: 1, + b: 2, + }, + baz: "qux", + }); + }); + + it("should handle created values", () => { + const sparseBefore = {}; + const fieldMask = "num,obj.a"; + expect(change.applyFieldMask(sparseBefore, after, fieldMask)).to.deep.equal({ + foo: "bar", + obj: { + b: 2, + }, + }); + }); + + it("should handle mutated values", () => { + const sparseBefore = { + num: 3, + obj: { + a: 3, + }, + }; + const fieldMask = "num,obj.a"; + expect(change.applyFieldMask(sparseBefore, after, fieldMask)).to.deep.equal({ + foo: "bar", + num: 3, + obj: { + a: 3, + b: 2, + }, + }); + }); + }); + + describe("fromJSON", () => { + it("should create a Change object with a `before` and `after`", () => { + const created = change.Change.fromJSON({ + before: { foo: "bar" }, + after: { foo: "faz" }, + }); + expect(created instanceof change.Change).to.equal(true); + expect(created.before).to.deep.equal({ foo: "bar" }); + expect(created.after).to.deep.equal({ foo: "faz" }); + }); + + it("should apply the customizer function to `before` and `after`", () => { + function customizer<T>(input: any) { + input.another = "value"; + return input as T; + } + const created = change.Change.fromJSON( + { + before: { foo: "bar" }, + after: { foo: "faz" }, + }, + customizer + ); + expect(created.before).to.deep.equal({ + foo: "bar", + another: "value", + }); + expect(created.after).to.deep.equal({ + foo: "faz", + another: "value", + }); + }); + }); +}); diff --git a/spec/common/config.spec.ts b/spec/common/config.spec.ts new file mode 100644 index 000000000..8dc9fe9da --- /dev/null +++ b/spec/common/config.spec.ts @@ -0,0 +1,71 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE.
+ +import { expect } from "chai"; +import fs from "fs"; +import * as sinon from "sinon"; + +import { firebaseConfig, resetCache } from "../../src/common/config"; + +describe("firebaseConfig()", () => { + let readFileSync: sinon.SinonStub; + let cwdStub: sinon.SinonStub; + + before(() => { + readFileSync = sinon.stub(fs, "readFileSync"); + readFileSync.throws("Unexpected call"); + cwdStub = sinon.stub(process, "cwd"); + cwdStub.returns("/srv"); + }); + + after(() => { + sinon.verifyAndRestore(); + }); + + afterEach(() => { + resetCache(); + + delete process.env.FIREBASE_CONFIG; + delete process.env.K_CONFIGURATION; + }); + + it("loads Firebase configs from FIREBASE_CONFIG env variable", () => { + process.env.FIREBASE_CONFIG = JSON.stringify({ + databaseURL: "foo@firebaseio.com", + }); + expect(firebaseConfig()).to.have.property("databaseURL", "foo@firebaseio.com"); + }); + + it("loads Firebase configs from FIREBASE_CONFIG env variable pointing to a file", () => { + const oldEnv = process.env; + (process as any).env = { + ...oldEnv, + FIREBASE_CONFIG: ".firebaseconfig.json", + }; + try { + readFileSync.returns(Buffer.from('{"databaseURL": "foo@firebaseio.com"}')); + expect(firebaseConfig()).to.have.property("databaseURL", "foo@firebaseio.com"); + } finally { + (process as any).env = oldEnv; + } + }); +}); diff --git a/spec/common/encoding.spec.ts b/spec/common/encoding.spec.ts new file mode 100644 index 000000000..5e73b06cd --- /dev/null +++ b/spec/common/encoding.spec.ts @@ -0,0 +1,48 @@ +import { expect } from "chai"; +import { convertInvoker } from "../../src/common/encoding"; + +describe("convertInvoker", () => { + it("should raise an error on empty array", () => { + expect(() => convertInvoker([])).to.throw; + }); + + it("should raise an error on empty string", () => { + expect(() => convertInvoker("")).to.throw; + }); + + it("should raise an error on empty string with service accounts", () => { + expect(() => convertInvoker(["service-account@", ""])).to.throw; + }); + + it("should raise an error on mixing public and service accounts", () => { + expect(() => convertInvoker(["public", "service-account@"])).to.throw; + }); + + it("should raise an error on mixing private and service accounts", () => { + expect(() => convertInvoker(["private", "service-account@"])).to.throw; + }); + + it("should return the correct public invoker", () => { + const invoker = convertInvoker("public"); + + expect(invoker).to.deep.equal(["public"]); + }); + + it("should return the correct private invoker", () => { + const invoker = convertInvoker("private"); + + expect(invoker).to.deep.equal(["private"]); + }); + + it("should return the correct scalar invoker", () => { + const invoker = convertInvoker("service-account@"); + + expect(invoker).to.deep.equal(["service-account@"]); + }); + + it("should return the correct array invoker", () => { + const invoker = convertInvoker(["service-account1@", "service-account2@"]); + + expect(invoker).to.deep.equal(["service-account1@", "service-account2@"]); + }); +}); diff --git a/spec/common/metaprogramming.ts b/spec/common/metaprogramming.ts new file mode 100644 index 000000000..5c16a710b --- /dev/null +++ b/spec/common/metaprogramming.ts @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, 
copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+// This method will fail to compile if value is not of the explicit parameter type.
+/* eslint-disable @typescript-eslint/no-unused-vars,@typescript-eslint/no-empty-function */
+export function expectType<Type>(value: Type) {}
+export function expectNever<Never extends never>() {}
+export function expectExtends() {}
diff --git a/spec/common/options.ts b/spec/common/options.ts
new file mode 100644
index 000000000..1e49863c4
--- /dev/null
+++ b/spec/common/options.ts
@@ -0,0 +1,36 @@
+// The MIT License (MIT)
+//
+// Copyright (c) 2022 Firebase
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE ignoreUnusedWarning OR OTHER DEALINGS IN THE
+// SOFTWARE.
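The metaprogramming helpers above make the TypeScript compiler the test runner: the function bodies are empty, and a regression surfaces as a compile error rather than a runtime failure. A minimal sketch of the pattern, using a made-up `Fruit` type rather than anything from the SDK:

```ts
// Empty-bodied helper: the only "assertion" is that the argument type-checks
// against the explicit type parameter.
function expectType<Type>(value: Type): void {
  // intentionally a no-op; the check happens entirely at compile time
}

type Fruit = "apple" | "banana"; // hypothetical type under test

expectType<Fruit>("apple"); // compiles
// expectType<Fruit>("carrot"); // uncommenting this is the "failing test": it won't compile
```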
+import { ResettableKeys, ResetValue } from "../../src/common/options";
+import { expectNever, expectType } from "./metaprogramming";
+
+describe("ResettableKeys", () => {
+  it("should pick out keys with a type that includes ResetValue", () => {
+    type A = { a: number; b: ResetValue; c: number | boolean | ResetValue };
+    expectType<ResettableKeys<A>>("b");
+    expectType<ResettableKeys<A>>("c");
+  });
+
+  it("should return an empty type if no keys are resettable", () => {
+    type A = { a: number };
+    expectNever<ResettableKeys<A>>();
+  });
+});
diff --git a/spec/common/params.spec.ts b/spec/common/params.spec.ts
new file mode 100644
index 000000000..9887c743e
--- /dev/null
+++ b/spec/common/params.spec.ts
@@ -0,0 +1,109 @@
+// The MIT License (MIT)
+//
+// Copyright (c) 2022 Firebase
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE ignoreUnusedWarning OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+import { VarName, ParamsOf, Split } from "../../src/common/params";
+import { expectNever, expectType } from "./metaprogramming";
+
+describe("Params namespace", () => {
+  describe("Split", () => {
+    // Note the subtle difference in the first two cases:
+    // if passed a string (instead of a string literal) then split returns a
+    // string[], which means "any number of elements as long as they are a string"
+    // but if passed a literal string "" then split returns [] which means "a
+    // tuple of zero elements".
+ + it("handles generic strings", () => { + expectType>([] as string[]); + }); + + it("handles empty strings", () => { + expectType>([]); + }); + + it("handles just a slash", () => { + expectType>([]); + }); + + it("handles literal strings with one component", () => { + expectType>(["a"]); + }); + + it("handles literal strings with more than one component", () => { + expectType>(["a", "b", "c"]); + }); + + it("strips leading slashes", () => { + expectType>(["a", "b", "c"]); + }); + }); + + describe("VarName", () => { + it("extracts nothing from strings without params", () => { + expectNever>(); + }); + + it("extracts {segment} captures", () => { + expectType>("uid"); + }); + + it("extracts {segment=*} captures", () => { + expectType>("uid"); + }); + + it("extracts {segment=**} captures", () => { + expectType>("uid"); + }); + }); + + describe("ParamsOf", () => { + it("falls back to Record without better type info", () => { + expectType>({} as Record); + }); + + it("is the empty object when there are no params", () => { + expectType>({} as Record); + }); + + it("extracts a single param", () => { + expectType>({ + uid: "uid", + } as const); + }); + + it("extracts multiple params", () => { + expectType>({ + uid: "hello", + log: "world", + } as const); + }); + + it("extracts strings with params interpolated", () => { + // NOTE: be wary of this test. Hover over the types to see what they're + // parsing as. When doing TDD this test surprisingly passed. That's + // because ParamsOf was returning the empty interface because it did + // not special case for Record. This meant that any input + // would pass the test. Fixing this issue in the test suite is as copmlex + // as fixing the bug to begin with and would probably share implementations. + expectType>({ uid: "uid" }); + }); + }); +}); diff --git a/spec/common/providers/https.spec.ts b/spec/common/providers/https.spec.ts new file mode 100644 index 000000000..9dc42b504 --- /dev/null +++ b/spec/common/providers/https.spec.ts @@ -0,0 +1,1107 @@ +import { expect } from "chai"; +import { App, initializeApp } from "firebase-admin/app"; +import * as appCheck from "firebase-admin/app-check"; +import * as sinon from "sinon"; +import * as nock from "nock"; + +import { getApp, setApp } from "../../../src/common/app"; +import * as debug from "../../../src/common/debug"; +import * as https from "../../../src/common/providers/https"; +import * as mocks from "../../fixtures/credential/key.json"; +import { + expectedResponseHeaders, + generateAppCheckToken, + generateIdToken, + generateUnsignedAppCheckToken, + generateUnsignedIdToken, + mockFetchAppCheckPublicJwks, + mockFetchPublicKeys, + mockRequest, +} from "../../fixtures/mockrequest"; +import { checkAppCheckContext, checkAuthContext, runHandler, RunHandlerResult } from "../../helper"; + +/** + * A CallTest is a specification for a test of a callable function that + * simulates triggering the http endpoint, and checks that the request + * and response are properly converted to their http equivalents. + */ +interface CallTest { + // An http request, mocking a subset of https.Request. + httpRequest: any; + + // The expected format of the request passed to the handler. + expectedData: any; + + // The function to execute with the request. + callableFunction: (data: any, context: https.CallableContext) => any; + + callableFunction2: (request: https.CallableRequest) => any; + + callableOption?: https.CallableOptions; + + // The expected shape of the http response returned to the callable SDK. 
+ expectedHttpResponse: RunHandlerResult; +} + +// Runs a CallTest test. +async function runCallableTest(test: CallTest): Promise { + const opts = { + cors: { origin: true, methods: "POST" }, + ...test.callableOption, + }; + const callableFunctionV1 = https.onCallHandler( + opts, + (data, context) => { + expect(data).to.deep.equal(test.expectedData); + return test.callableFunction(data, context); + }, + "gcfv1" + ); + + const responseV1 = await runHandler(callableFunctionV1, test.httpRequest); + + expect(responseV1.body).to.deep.equal(JSON.stringify(test.expectedHttpResponse.body)); + expect(responseV1.headers).to.deep.equal(test.expectedHttpResponse.headers); + expect(responseV1.status).to.equal(test.expectedHttpResponse.status); + + const callableFunctionV2 = https.onCallHandler( + opts, + (request) => { + expect(request.data).to.deep.equal(test.expectedData); + return test.callableFunction2(request); + }, + "gcfv2" + ); + + const responseV2 = await runHandler(callableFunctionV2, test.httpRequest); + + expect(responseV2.body).to.deep.equal(JSON.stringify(test.expectedHttpResponse.body)); + expect(responseV2.headers).to.deep.equal(test.expectedHttpResponse.headers); + expect(responseV2.status).to.equal(test.expectedHttpResponse.status); +} + +describe("onCallHandler", () => { + let app: App; + + before(() => { + const credential = { + getAccessToken: () => { + return Promise.resolve({ + expires_in: 1000, + access_token: "fake", + }); + }, + getCertificate: () => { + return { + projectId: "aProjectId", + }; + }, + }; + app = initializeApp( + { + projectId: "aProjectId", + credential, + }, + "test-app" + ); + setApp(app); + }); + + after(() => { + setApp(undefined); + }); + + it("should handle success", () => { + return runCallableTest({ + httpRequest: mockRequest({ foo: "bar" }), + expectedData: { foo: "bar" }, + callableFunction: () => ({ baz: "qux" }), + callableFunction2: () => ({ baz: "qux" }), + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: { baz: "qux" } }, + }, + }); + }); + + it("should handle null data and return", () => { + return runCallableTest({ + httpRequest: mockRequest(null), + expectedData: null, + callableFunction: () => null, + callableFunction2: () => null, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should handle void return", () => { + return runCallableTest({ + httpRequest: mockRequest(null), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should reject bad method", () => { + const req = mockRequest(null); + req.method = "GET"; + return runCallableTest({ + httpRequest: req, + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 400, + headers: expectedResponseHeaders, + body: { + error: { message: "Bad Request", status: "INVALID_ARGUMENT" }, + }, + }, + }); + }); + + it("should ignore charset", () => { + return runCallableTest({ + httpRequest: mockRequest(null, "application/json; charset=utf-8"), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should reject bad content type", () 
=> { + return runCallableTest({ + httpRequest: mockRequest(null, "text/plain"), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 400, + headers: expectedResponseHeaders, + body: { + error: { message: "Bad Request", status: "INVALID_ARGUMENT" }, + }, + }, + }); + }); + + it("should reject extra body fields", () => { + const req = mockRequest(null); + req.body.extra = "bad"; + return runCallableTest({ + httpRequest: req, + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 400, + headers: expectedResponseHeaders, + body: { + error: { message: "Bad Request", status: "INVALID_ARGUMENT" }, + }, + }, + }); + }); + + it("should handle unhandled error", () => { + return runCallableTest({ + httpRequest: mockRequest(null), + expectedData: null, + callableFunction: () => { + throw new Error(`ceci n'est pas une error`); + }, + callableFunction2: () => { + throw new Error(`cece n'est pas une error`); + }, + expectedHttpResponse: { + status: 500, + headers: expectedResponseHeaders, + body: { error: { message: "INTERNAL", status: "INTERNAL" } }, + }, + }); + }); + + it("should handle unknown error status", () => { + return runCallableTest({ + httpRequest: mockRequest(null), + expectedData: null, + callableFunction: () => { + throw new https.HttpsError("THIS_IS_NOT_VALID" as any, "nope"); + }, + callableFunction2: () => { + throw new https.HttpsError("THIS_IS_NOT_VALID" as any, "nope"); + }, + expectedHttpResponse: { + status: 500, + headers: expectedResponseHeaders, + body: { error: { message: "INTERNAL", status: "INTERNAL" } }, + }, + }); + }); + + it("should handle well-formed error", () => { + return runCallableTest({ + httpRequest: mockRequest(null), + expectedData: null, + callableFunction: () => { + throw new https.HttpsError("not-found", "i am error"); + }, + callableFunction2: () => { + throw new https.HttpsError("not-found", "i am error"); + }, + expectedHttpResponse: { + status: 404, + headers: expectedResponseHeaders, + body: { error: { message: "i am error", status: "NOT_FOUND" } }, + }, + }); + }); + + describe("auth", () => { + let mock: nock.Scope; + + before(() => { + mock = mockFetchPublicKeys(); + }); + + after(() => { + mock.done(); + }); + + it("should handle auth", async () => { + const projectId = getApp().options.projectId; + const idToken = generateIdToken(projectId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + authorization: "Bearer " + idToken, + }), + expectedData: null, + callableFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + return null; + }, + callableFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + return null; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should handle auth - case insensitive", async () => { + const projectId = getApp().options.projectId; + const idToken = generateIdToken(projectId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + authorization: "bearer " + idToken, + }), + expectedData: null, + callableFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + return null; + }, + callableFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + return null; + }, + expectedHttpResponse: 
{ + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should reject auth with incorrect authorization header", async () => { + const projectId = getApp().options.projectId; + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + authorization: "Beaver heyyall", + }), + expectedData: null, + callableFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + return null; + }, + callableFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + return null; + }, + expectedHttpResponse: { + status: 401, + headers: expectedResponseHeaders, + body: { + error: { + message: "Unauthenticated", + status: "UNAUTHENTICATED", + }, + }, + }, + }); + }); + + it("should reject bad auth with bad signature", async () => { + const projectId = getApp().options.projectId; + const idToken = generateUnsignedIdToken(projectId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + authorization: "Bearer " + idToken, + }), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 401, + headers: expectedResponseHeaders, + body: { + error: { + message: "Unauthenticated", + status: "UNAUTHENTICATED", + }, + }, + }, + }); + }); + }); + + describe("AppCheck", () => { + describe("verify token", () => { + let mock: nock.Scope; + + before(() => { + mock = mockFetchAppCheckPublicJwks(); + }); + + after(() => { + mock.done(); + }); + + it("should handle AppCheck token", async () => { + const projectId = getApp().options.projectId; + const appId = "123:web:abc"; + const appCheckToken = generateAppCheckToken(projectId, appId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { appCheckToken }), + expectedData: null, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: true, + }, + callableFunction: (data, context) => { + checkAppCheckContext(context, projectId, appId); + return null; + }, + callableFunction2: (request) => { + checkAppCheckContext(request, projectId, appId); + return null; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should reject bad AppCheck token", async () => { + const projectId = getApp().options.projectId; + const appId = "123:web:abc"; + const appCheckToken = generateUnsignedAppCheckToken(projectId, appId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { appCheckToken }), + expectedData: null, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: true, + }, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 401, + headers: expectedResponseHeaders, + body: { + error: { + message: "Unauthenticated", + status: "UNAUTHENTICATED", + }, + }, + }, + }); + }); + + it("should handle bad AppCheck token with enforcement disabled", async () => { + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + appCheckToken: "FAKE", + }), + expectedData: null, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: false, + }, + callableFunction: (data, context) => { + expect(context.app).to.be.undefined; + return; + }, + callableFunction2: (request) => { + expect(request.app).to.be.undefined; + return; + }, + expectedHttpResponse: { + status: 200, + 
headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should handle bad AppCheck token with enforcement enabled", async () => { + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + appCheckToken: "FAKE", + }), + expectedData: null, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: true, + }, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + expectedHttpResponse: { + status: 401, + headers: expectedResponseHeaders, + body: { + error: { + message: "Unauthenticated", + status: "UNAUTHENTICATED", + }, + }, + }, + }); + }); + + it("should handle no AppCheck token with enforcement enabled", async () => { + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + appCheckToken: "MISSING", + }), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: true, + }, + expectedHttpResponse: { + status: 401, + headers: expectedResponseHeaders, + body: { + error: { + message: "Unauthenticated", + status: "UNAUTHENTICATED", + }, + }, + }, + }); + }); + + it("should handle instance id", async () => { + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + instanceIdToken: "iid-token", + }), + expectedData: null, + callableFunction: (data, context) => { + expect(context.auth).to.be.undefined; + expect(context.instanceIdToken).to.equal("iid-token"); + return null; + }, + callableFunction2: (request) => { + expect(request.auth).to.be.undefined; + expect(request.instanceIdToken).to.equal("iid-token"); + return null; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should expose raw request", async () => { + const mockReq = mockRequest(null, "application/json", {}); + await runCallableTest({ + httpRequest: mockReq, + expectedData: null, + callableFunction: (data, context) => { + expect(context.rawRequest).to.not.be.undefined; + expect(context.rawRequest).to.equal(mockReq); + return null; + }, + callableFunction2: (request) => { + expect(request.rawRequest).to.not.be.undefined; + expect(request.rawRequest).to.equal(mockReq); + return null; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + describe("skip token verification debug mode support", () => { + before(() => { + sinon + .stub(debug, "isDebugFeatureEnabled") + .withArgs("skipTokenVerification") + .returns(true); + }); + + after(() => { + sinon.verifyAndRestore(); + }); + + it("should skip auth token verification", async () => { + const projectId = getApp().options.projectId; + const idToken = generateUnsignedIdToken(projectId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { + authorization: "Bearer " + idToken, + }), + expectedData: null, + callableFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + return null; + }, + callableFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + return null; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + + it("should skip app check token verification", async () => { + const projectId = getApp().options.projectId; + const appId = "123:web:abc"; + const appCheckToken = 
generateUnsignedAppCheckToken(projectId, appId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { appCheckToken }), + expectedData: null, + callableFunction: (data, context) => { + checkAppCheckContext(context, projectId, appId); + return null; + }, + callableFunction2: (request) => { + checkAppCheckContext(request, projectId, appId); + return null; + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + }); + }); + + describe("consume token", () => { + let getAppCheckStub: sinon.SinonStub; + + before(() => { + getAppCheckStub = sinon.stub(appCheck, "getAppCheck"); + }); + + after(() => { + sinon.verifyAndRestore(); + }); + + it("should throw error when using unsupported version of the admin sdk", async () => { + // Older versions of the admin SDK implements verifyToken with 1 argument. + getAppCheckStub.returns({ + verifyToken: (token) => ({ + token, + appId: "abc", + }), + }); + + const projectId = getApp().options.projectId; + const appId = "123:web:abc"; + const appCheckToken = generateAppCheckToken(projectId, appId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { appCheckToken }), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: true, + consumeAppCheckToken: true, + }, + expectedHttpResponse: { + status: 500, + headers: expectedResponseHeaders, + body: { + error: { + message: "Internal Error", + status: "INTERNAL", + }, + }, + }, + }); + }); + + it("should consume the app check token", async () => { + getAppCheckStub.returns({ + verifyToken: (token, opts) => { + expect(opts.consume).to.be.true; + return { + token, + appId: "abc", + }; + }, + }); + + const projectId = getApp().options.projectId; + const appId = "123:web:abc"; + const appCheckToken = generateAppCheckToken(projectId, appId); + await runCallableTest({ + httpRequest: mockRequest(null, "application/json", { appCheckToken }), + expectedData: null, + callableFunction: () => { + return; + }, + callableFunction2: () => { + return; + }, + callableOption: { + cors: { origin: true, methods: "POST" }, + enforceAppCheck: true, + consumeAppCheckToken: true, + }, + expectedHttpResponse: { + status: 200, + headers: expectedResponseHeaders, + body: { result: null }, + }, + }); + }); + }); + }); + + describe("Streaming callables", () => { + it("returns data in SSE format for requests Accept: text/event-stream header", async () => { + const mockReq = mockRequest( + { message: "hello streaming" }, + "application/json", + {}, + { accept: "text/event-stream" } + ); + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + }, + (req, resp) => { + resp.sendChunk("hello"); + return "world"; + }, + "gcfv2" + ); + + const resp = await runHandler(fn, mockReq as any); + const data = [`data: {"message":"hello"}`, `data: {"result":"world"}`]; + expect(resp.body).to.equal([...data, ""].join("\n\n")); + }); + + it("returns error in SSE format", async () => { + const mockReq = mockRequest( + { message: "hello streaming" }, + "application/json", + {}, + { accept: "text/event-stream" } + ); + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + }, + () => { + throw new Error("BOOM"); + }, + "gcfv2" + ); + + const resp = await runHandler(fn, mockReq as any); + const data = [`data: 
{"error":{"message":"INTERNAL","status":"INTERNAL"}}`]; + expect(resp.body).to.equal([...data, ""].join("\n\n")); + }); + + it("always returns error for v1 callables", async () => { + const mockReq = mockRequest( + { message: "hello streaming" }, + "application/json", + {}, + { accept: "text/event-stream" } + ) as any; + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + }, + () => { + return "hello world"; + }, + "gcfv1" + ); + const resp = await runHandler(fn, mockReq); + expect(JSON.parse(resp.body)).to.deep.equal({ + error: { + status: "INVALID_ARGUMENT", + message: "Unsupported Accept header 'text/event-stream'", + }, + }); + }); + + it("stops processing when client disconnects", async () => { + const mockReq = mockRequest( + { message: "test abort" }, + "application/json", + {}, + { accept: "text/event-stream" } + ) as any; + + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + }, + async (req, resp) => { + await resp.sendChunk("initial message"); + await mockReq.emit("close"); + await resp.sendChunk("should not be sent"); + return "done"; + }, + "gcfv2" + ); + + const resp = await runHandler(fn, mockReq); + + expect(resp.body).to.equal(`data: {"message":"initial message"}\n\n`); + }); + + describe("Heartbeats", () => { + let clock: sinon.SinonFakeTimers; + + beforeEach(() => { + clock = sinon.useFakeTimers(); + }); + + afterEach(() => { + clock.restore(); + }); + + it("sends heartbeat messages at specified interval", async () => { + const mockReq = mockRequest( + { message: "test heartbeat" }, + "application/json", + {}, + { accept: "text/event-stream" } + ); + + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + heartbeatSeconds: 5, + }, + async () => { + // Simulate long-running operation + await new Promise((resolve) => setTimeout(resolve, 11_000)); + return "done"; + }, + "gcfv2" + ); + + const handlerPromise = runHandler(fn, mockReq as any); + await clock.tickAsync(11_000); + const resp = await handlerPromise; + const data = [": ping", ": ping", `data: {"result":"done"}`]; + expect(resp.body).to.equal([...data, ""].join("\n\n")); + }); + + it("doesn't send heartbeat messages if user writes data", async () => { + const mockReq = mockRequest( + { message: "test heartbeat" }, + "application/json", + {}, + { accept: "text/event-stream" } + ); + + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + heartbeatSeconds: 5, + }, + async (resp, res) => { + await new Promise((resolve) => setTimeout(resolve, 3_000)); + res.sendChunk("hello"); + await new Promise((resolve) => setTimeout(resolve, 3_000)); + return "done"; + }, + "gcfv2" + ); + + const handlerPromise = runHandler(fn, mockReq as any); + await clock.tickAsync(10_000); + const resp = await handlerPromise; + const data = [`data: {"message":"hello"}`, `data: {"result":"done"}`]; + expect(resp.body).to.equal([...data, ""].join("\n\n")); + }); + + it("respects null heartbeatSeconds option", async () => { + const mockReq = mockRequest( + { message: "test no heartbeat" }, + "application/json", + {}, + { accept: "text/event-stream" } + ); + + const fn = https.onCallHandler( + { + cors: { origin: true, methods: "POST" }, + heartbeatSeconds: null, + }, + async () => { + await new Promise((resolve) => setTimeout(resolve, 31_000)); + return "done"; + }, + "gcfv2" + ); + + const handlerPromise = runHandler(fn, mockReq as any); + await clock.tickAsync(31_000); + const resp = await handlerPromise; + 
expect(resp.body).to.equal('data: {"result":"done"}\n\n'); + }); + }); + }); +}); + +describe("encoding/decoding", () => { + it("encodes null", () => { + expect(https.encode(null)).to.be.null; + expect(https.encode(undefined)).to.be.null; + }); + + it("encodes int", () => { + expect(https.encode(1)).to.equal(1); + // Number isn't allowed in our own codebase, but we need to test it, in case + // a user passes one in. There's no reason not to support it, and we don't + // want to unintentionally encode them as {}. + // tslint:disable-next-line + expect(https.encode(Number(1))).to.equal(1); + }); + + it("decodes int", () => { + expect(https.decode(1)).to.equal(1); + }); + + it("encodes long", () => { + expect(https.encode(-9223372036854775000)).to.equal(-9223372036854775000); + }); + + it("decodes long", () => { + expect( + https.decode({ + "@type": "type.googleapis.com/google.protobuf.Int64Value", + value: "-9223372036854775000", + }) + ).to.equal(-9223372036854775000); + }); + + it("encodes unsigned long", () => { + expect(https.encode(9223372036854800000)).to.equal(9223372036854800000); + }); + + it("decodes unsigned long", () => { + expect( + https.decode({ + "@type": "type.googleapis.com/google.protobuf.UInt64Value", + value: "9223372036854800000", + }) + ).to.equal(9223372036854800000); + }); + + it("encodes double", () => { + expect(https.encode(1.2)).to.equal(1.2); + }); + it("decodes double", () => { + expect(https.decode(1.2)).to.equal(1.2); + }); + + it("encodes string", () => { + expect(https.encode("hello")).to.equal("hello"); + }); + + it("decodes string", () => { + expect(https.decode("hello")).to.equal("hello"); + }); + + it("encodes array", () => { + // TODO(klimt): Make this test more interesting once there's some type + // that needs encoding that can be created from JavaScript. + expect(https.encode([1, "2", [3, 4]])).to.deep.equal([1, "2", [3, 4]]); + }); + + it("decodes array", () => { + expect( + https.decode([ + 1, + "2", + [ + 3, + { + value: "1099511627776", + "@type": "type.googleapis.com/google.protobuf.Int64Value", + }, + ], + ]) + ).to.deep.equal([1, "2", [3, 1099511627776]]); + }); + + it("encodes object", () => { + // TODO(klimt): Make this test more interesting once there's some type + // that needs encoding that can be created from JavaScript. 
+ expect( + https.encode({ + foo: 1, + bar: "hello", + baz: [1, 2, 3], + }) + ).to.deep.equal({ + foo: 1, + bar: "hello", + baz: [1, 2, 3], + }); + }); + + it("decodes object", () => { + expect( + https.decode({ + foo: 1, + bar: "hello", + baz: [ + 1, + 2, + { + value: "1099511627776", + "@type": "type.googleapis.com/google.protobuf.Int64Value", + }, + ], + }) + ).to.deep.equal({ + foo: 1, + bar: "hello", + baz: [1, 2, 1099511627776], + }); + }); + + it("encodes function as an empty object", () => { + expect(https.encode(() => "foo")).to.deep.equal({}); + }); +}); + +describe("decode tokens", () => { + const projectId = "myproject"; + const appId = "123:web:abc"; + + it("decodes valid Auth ID Token", () => { + const idToken = https.unsafeDecodeIdToken(generateIdToken(projectId)); + expect(idToken.uid).to.equal(mocks.user_id); + expect(idToken.sub).to.equal(mocks.user_id); + }); + + it("decodes invalid Auth ID Token", () => { + const idToken = https.unsafeDecodeIdToken(generateUnsignedIdToken(projectId)); + expect(idToken.uid).to.equal(mocks.user_id); + expect(idToken.sub).to.equal(mocks.user_id); + }); + + it("decodes valid App Check Token", () => { + const idToken = https.unsafeDecodeAppCheckToken(generateAppCheckToken(projectId, appId)); + expect(idToken.app_id).to.equal(appId); + expect(idToken.sub).to.equal(appId); + }); + + it("decodes invalid App Check Token", () => { + const idToken = https.unsafeDecodeAppCheckToken( + generateUnsignedAppCheckToken(projectId, appId) + ); + expect(idToken.app_id).to.equal(appId); + expect(idToken.sub).to.equal(appId); + }); +}); diff --git a/spec/common/providers/identity.spec.ts b/spec/common/providers/identity.spec.ts new file mode 100644 index 000000000..253a337b2 --- /dev/null +++ b/spec/common/providers/identity.spec.ts @@ -0,0 +1,927 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
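The https.spec.ts assertions above fix the callable wire protocol: a handler's return value comes back as `{ "result": ... }`, a thrown `HttpsError` becomes `{ "error": { "message", "status" } }` with the matching HTTP status code, and any other thrown error is collapsed to `INTERNAL`. A sketch of the developer-facing side that produces those shapes, assuming the v2 `onCall` API; the `getWidget` function and its data shape are invented for the example:

```ts
import { onCall, HttpsError } from "firebase-functions/v2/https";

// Hypothetical callable. Returning a value yields {"result": {...}} on the wire;
// HttpsError("not-found", ...) yields HTTP 404 with
// {"error": {"message": "...", "status": "NOT_FOUND"}}; any other thrown error
// is reported to the client as INTERNAL, which is what the tests above assert.
export const getWidget = onCall<{ id: string }>((request) => {
  if (!request.auth) {
    throw new HttpsError("unauthenticated", "Sign in to fetch widgets.");
  }
  if (request.data.id !== "demo") {
    throw new HttpsError("not-found", "i am error");
  }
  return { id: request.data.id, name: "Demo widget" };
});
```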
+ +import { expect } from "chai"; +import * as express from "express"; +import * as identity from "../../../src/common/providers/identity"; + +const EVENT = "EVENT_TYPE"; +const now = new Date(); +const TEST_NAME = "John Doe"; +const ALLOW = "ALLOW"; +const BLOCK = "BLOCK"; + +describe("identity", () => { + describe("userRecordConstructor", () => { + it("will provide falsey values for fields that are not in raw wire data", () => { + const record = identity.userRecordConstructor({ uid: "123" }); + expect(record.toJSON()).to.deep.equal({ + uid: "123", + email: null, + emailVerified: false, + displayName: null, + photoURL: null, + phoneNumber: null, + disabled: false, + providerData: [], + customClaims: {}, + passwordSalt: null, + passwordHash: null, + tokensValidAfterTime: null, + metadata: { + creationTime: null, + lastSignInTime: null, + }, + }); + }); + + it("will not interfere with fields that are in raw wire data", () => { + const raw: any = { + uid: "123", + email: "email@gmail.com", + emailVerified: true, + displayName: "User", + photoURL: "url", + phoneNumber: "1233332222", + disabled: true, + providerData: [], + customClaims: {}, + passwordSalt: "abc", + passwordHash: "def", + tokensValidAfterTime: "2027-02-02T23:01:19.797Z", + metadata: { + creationTime: "2017-02-02T23:06:26.124Z", + lastSignInTime: "2017-02-02T23:01:19.797Z", + }, + }; + const record = identity.userRecordConstructor(raw); + expect(record.toJSON()).to.deep.equal(raw); + }); + + it("will convert raw wire fields createdAt and lastSignedInAt to creationTime and lastSignInTime", () => { + const raw: any = { + uid: "123", + metadata: { + createdAt: "2017-02-02T23:06:26.124Z", + lastSignedInAt: "2017-02-02T23:01:19.797Z", + }, + }; + const record = identity.userRecordConstructor(raw); + expect(record.metadata).to.deep.equal({ + creationTime: "2017-02-02T23:06:26.124Z", + lastSignInTime: "2017-02-02T23:01:19.797Z", + }); + }); + + it("should stringify the record", () => { + const raw: any = { + uid: "123", + email: "email@gmail.com", + emailVerified: true, + displayName: "User", + photoURL: "url", + phoneNumber: "1233332222", + disabled: true, + providerData: ["something"], + customClaims: { + claim: "value", + another: { + inner: "value", + }, + }, + passwordSalt: "abc", + passwordHash: "def", + tokensValidAfterTime: "2027-02-02T23:01:19.797Z", + metadata: { + creationTime: "2017-02-02T23:06:26.124Z", + lastSignInTime: "2017-02-02T23:01:19.797Z", + }, + }; + const record = identity.userRecordConstructor(raw); + expect(() => JSON.stringify(record)).to.not.throw; + }); + }); + + describe("isValidRequest", () => { + it("should error on non-post", () => { + const req = { + method: "GET", + header: { + "Content-Type": "application/json", + }, + body: { + data: { + jwt: "1.2.3", + }, + }, + } as unknown as express.Request; + + expect(identity.isValidRequest(req)).to.be.false; + }); + + it("should error on bad Content-Type", () => { + const req = { + method: "POST", + header() { + return "text/css"; + }, + body: { + data: { + jwt: "1.2.3", + }, + }, + } as unknown as express.Request; + + expect(identity.isValidRequest(req)).to.be.false; + }); + + it("should error without req body", () => { + const req = { + method: "POST", + header() { + return "application/json"; + }, + } as unknown as express.Request; + + expect(identity.isValidRequest(req)).to.be.false; + }); + + it("should error without req body data", () => { + const req = { + method: "POST", + header() { + return "application/json"; + }, + body: {}, + } as unknown as 
express.Request; + + expect(identity.isValidRequest(req)).to.be.false; + }); + + it("should error without req body", () => { + const req = { + method: "POST", + header() { + return "application/json"; + }, + body: { + data: {}, + }, + } as unknown as express.Request; + + expect(identity.isValidRequest(req)).to.be.false; + }); + + it("should not error on valid request", () => { + const req = { + method: "POST", + header() { + return "application/json"; + }, + body: { + data: { + jwt: "1.2.3", + }, + }, + } as unknown as express.Request; + + expect(identity.isValidRequest(req)).to.be.true; + }); + }); + + describe("parseMetadata", () => { + const decodedMetadata = { + last_sign_in_time: 1476235905000, + creation_time: 1476136676000, + }; + const metadata = { + lastSignInTime: new Date(1476235905000).toUTCString(), + creationTime: new Date(1476136676000).toUTCString(), + }; + + it("should parse an undefined object", () => { + expect(identity.parseMetadata({})).to.deep.equal({ + creationTime: null, + lastSignInTime: null, + }); + }); + + it("should parse a decoded metadata object", () => { + const md = identity.parseMetadata(decodedMetadata); + + expect(md).to.deep.equal(metadata); + }); + }); + + describe("parseProviderData", () => { + const decodedUserInfo = { + provider_id: "google.com", + display_name: TEST_NAME, + photo_url: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + uid: "1234567890", + email: "user@gmail.com", + }; + const userInfo = { + providerId: "google.com", + displayName: TEST_NAME, + photoURL: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + uid: "1234567890", + email: "user@gmail.com", + phoneNumber: undefined, + }; + const decodedUserInfoPhone = { + provider_id: "phone", + phone_number: "+11234567890", + uid: "+11234567890", + }; + const userInfoPhone = { + providerId: "phone", + displayName: undefined, + photoURL: undefined, + uid: "+11234567890", + email: undefined, + phoneNumber: "+11234567890", + }; + + it("should parse the user info", () => { + expect(identity.parseProviderData([decodedUserInfo])).to.deep.equal([userInfo]); + }); + + it("should parse the user info with phone", () => { + expect(identity.parseProviderData([decodedUserInfoPhone])).to.deep.equal([userInfoPhone]); + }); + }); + + describe("parseDate", () => { + it("should return null if tokens undefined", () => { + expect(identity.parseDate()).to.be.null; + }); + + it("should parse the date", () => { + expect(identity.parseDate(1476136676)).to.equal(new Date(1476136676000).toUTCString()); + }); + }); + + describe("parseMultiFactor", () => { + const decodedMultiFactors = { + enrolled_factors: [ + { + uid: "enrollmentId1", + display_name: "displayName1", + enrollment_time: now.toISOString(), + phone_number: "+16505551234", + }, + { + uid: "enrollmentId2", + enrollment_time: now.toISOString(), + }, + ], + }; + const multiFactors = { + enrolledFactors: [ + { + uid: "enrollmentId1", + displayName: "displayName1", + enrollmentTime: now.toUTCString(), + phoneNumber: "+16505551234", + factorId: "phone", + }, + { + uid: "enrollmentId2", + displayName: undefined, + enrollmentTime: now.toUTCString(), + factorId: undefined, + phoneNumber: undefined, + }, + ], + }; + + it("should return null on undefined factor", () => { + expect(identity.parseMultiFactor()).to.be.null; + }); + + it("should return null without enrolled factors", () => { + expect(identity.parseMultiFactor({})).to.be.null; + }); + + it("should error on an invalid factor", () => { + const factors = { + enrolled_factors: [{} as 
identity.DecodedPayloadMfaInfo], + }; + + expect(() => identity.parseMultiFactor(factors)).to.throw( + "INTERNAL ASSERT FAILED: Invalid multi-factor info response" + ); + }); + + it("should correctly parse factors", () => { + expect(identity.parseMultiFactor(decodedMultiFactors)).to.deep.equal(multiFactors); + }); + }); + + describe("parseUserRecord", () => { + const decodedUserRecord = { + uid: "abcdefghijklmnopqrstuvwxyz", + email: "user@gmail.com", + email_verified: true, + display_name: TEST_NAME, + phone_number: "+11234567890", + provider_data: [ + { + provider_id: "google.com", + display_name: TEST_NAME, + photo_url: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + email: "user@gmail.com", + uid: "1234567890", + }, + { + provider_id: "facebook.com", + display_name: "John Smith", + photo_url: "https://facebook.com/0987654321/photo.jpg", + email: "user@facebook.com", + uid: "0987654321", + }, + { + provider_id: "phone", + uid: "+11234567890", + phone_number: "+11234567890", + }, + { + provider_id: "password", + email: "user@gmail.com", + uid: "user@gmail.com", + display_name: TEST_NAME, + }, + ], + password_hash: "passwordHash", + password_salt: "passwordSalt", + photo_url: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + tokens_valid_after_time: 1476136676, + metadata: { + last_sign_in_time: 1476235905000, + creation_time: 1476136676000, + }, + custom_claims: { + admin: true, + group_id: "group123", + }, + tenant_id: "TENANT_ID", + multi_factor: { + enrolled_factors: [ + { + uid: "enrollmentId1", + display_name: "displayName1", + enrollment_time: now.toISOString(), + phone_number: "+16505551234", + factor_id: "phone", + }, + { + uid: "enrollmentId2", + enrollment_time: now.toISOString(), + phone_number: "+16505556789", + factor_id: "phone", + }, + ], + }, + }; + + const userRecord = { + uid: "abcdefghijklmnopqrstuvwxyz", + email: "user@gmail.com", + phoneNumber: "+11234567890", + emailVerified: true, + disabled: false, + displayName: TEST_NAME, + providerData: [ + { + providerId: "google.com", + displayName: TEST_NAME, + photoURL: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + email: "user@gmail.com", + uid: "1234567890", + phoneNumber: undefined, + }, + { + providerId: "facebook.com", + displayName: "John Smith", + photoURL: "https://facebook.com/0987654321/photo.jpg", + email: "user@facebook.com", + uid: "0987654321", + phoneNumber: undefined, + }, + { + providerId: "phone", + displayName: undefined, + photoURL: undefined, + email: undefined, + uid: "+11234567890", + phoneNumber: "+11234567890", + }, + { + providerId: "password", + displayName: TEST_NAME, + photoURL: undefined, + email: "user@gmail.com", + uid: "user@gmail.com", + phoneNumber: undefined, + }, + ], + passwordHash: "passwordHash", + passwordSalt: "passwordSalt", + photoURL: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + metadata: { + lastSignInTime: new Date(1476235905000).toUTCString(), + creationTime: new Date(1476136676000).toUTCString(), + }, + customClaims: { + admin: true, + group_id: "group123", + }, + tokensValidAfterTime: new Date(1476136676000).toUTCString(), + tenantId: "TENANT_ID", + multiFactor: { + enrolledFactors: [ + { + uid: "enrollmentId1", + displayName: "displayName1", + enrollmentTime: now.toUTCString(), + phoneNumber: "+16505551234", + factorId: "phone", + }, + { + uid: "enrollmentId2", + displayName: undefined, + enrollmentTime: now.toUTCString(), + phoneNumber: "+16505556789", + factorId: "phone", + }, + ], + }, + }; + + it("should error if 
decoded does not have uid", () => { + expect(() => identity.parseAuthUserRecord({} as identity.DecodedPayloadUserRecord)).to.throw( + "INTERNAL ASSERT FAILED: Invalid user response" + ); + }); + + it("should parse user record", () => { + const ur = identity.parseAuthUserRecord(decodedUserRecord); + + expect(ur).to.deep.equal(userRecord); + }); + }); + + describe("parseAuthEventContext", () => { + const TEST_RECAPTCHA_SCORE = 0.9; + const rawUserInfo = { + name: TEST_NAME, + granted_scopes: + "openid https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile", + id: "123456789", + verified_email: true, + given_name: "John", + locale: "en", + family_name: "Doe", + email: "johndoe@gmail.com", + picture: "https://lh3.googleusercontent.com/1233456789/mo/photo.jpg", + }; + + it("should parse an unknown event", () => { + const decodedJwt = { + aud: "https://us-east1-project_id.cloudfunctions.net/function-1", + exp: 60 * 60 + 1, + iat: 1, + iss: "https://securetoken.google.com/project_id", + sub: "someUid", + uid: "someUid", + event_id: "EVENT_ID", + event_type: EVENT, + ip_address: "1.2.3.4", + user_agent: "USER_AGENT", + locale: "en", + raw_user_info: JSON.stringify(rawUserInfo), + recaptcha_score: TEST_RECAPTCHA_SCORE, + }; + const context = { + locale: "en", + ipAddress: "1.2.3.4", + userAgent: "USER_AGENT", + eventId: "EVENT_ID", + eventType: EVENT, + emailType: undefined, + smsType: undefined, + authType: "UNAUTHENTICATED", + resource: { + service: "identitytoolkit.googleapis.com", + name: "projects/project-id", + }, + timestamp: new Date(1000).toUTCString(), + additionalUserInfo: { + providerId: undefined, + profile: rawUserInfo, + username: undefined, + isNewUser: false, + recaptchaScore: TEST_RECAPTCHA_SCORE, + email: undefined, + phoneNumber: undefined, + }, + credential: null, + params: {}, + }; + + expect(identity.parseAuthEventContext(decodedJwt, "project-id")).to.deep.equal(context); + }); + + it("should parse a beforeSignIn event", () => { + const time = now.getTime(); + const decodedJwt = { + aud: "https://us-east1-project_id.cloudfunctions.net/function-1", + exp: 60 * 60 + 1, + iat: 1, + iss: "https://securetoken.google.com/project_id", + sub: "someUid", + uid: "someUid", + event_id: "EVENT_ID", + event_type: "beforeSignIn", + ip_address: "1.2.3.4", + user_agent: "USER_AGENT", + locale: "en", + sign_in_method: "password", + raw_user_info: JSON.stringify(rawUserInfo), + oauth_id_token: "ID_TOKEN", + oauth_access_token: "ACCESS_TOKEN", + oauth_refresh_token: "REFRESH_TOKEN", + oauth_token_secret: "OAUTH_TOKEN_SECRET", + oauth_expires_in: 3600, + recaptcha_score: TEST_RECAPTCHA_SCORE, + }; + const context = { + locale: "en", + ipAddress: "1.2.3.4", + userAgent: "USER_AGENT", + eventId: "EVENT_ID", + eventType: "providers/cloud.auth/eventTypes/user.beforeSignIn:password", + emailType: undefined, + smsType: undefined, + authType: "UNAUTHENTICATED", + resource: { + service: "identitytoolkit.googleapis.com", + name: "projects/project-id", + }, + timestamp: new Date(1000).toUTCString(), + additionalUserInfo: { + providerId: "password", + profile: rawUserInfo, + username: undefined, + isNewUser: false, + recaptchaScore: TEST_RECAPTCHA_SCORE, + email: undefined, + phoneNumber: undefined, + }, + credential: { + claims: undefined, + idToken: "ID_TOKEN", + accessToken: "ACCESS_TOKEN", + refreshToken: "REFRESH_TOKEN", + expirationTime: new Date(time + 3600 * 1000).toUTCString(), + secret: "OAUTH_TOKEN_SECRET", + providerId: "password", + signInMethod: 
"password", + }, + params: {}, + }; + + expect(identity.parseAuthEventContext(decodedJwt, "project-id", time)).to.deep.equal(context); + }); + + it("should parse a beforeCreate event", () => { + const time = now.getTime(); + // beforeCreate + const decodedJwt = { + aud: "https://us-east1-project_id.cloudfunctions.net/beforeCreate", + exp: 60 * 60 + 1, + iat: 1, + iss: "https://securetoken.google.com/project_id", + sub: "abcdefghijklmnopqrstuvwxyz", + uid: "abcdefghijklmnopqrstuvwxyz", + event_id: "EVENT_ID", + event_type: "beforeCreate", + ip_address: "1.2.3.4", + user_agent: "USER_AGENT", + locale: "en", + sign_in_method: "oidc.provider", + tenant_id: "TENANT_ID", + user_record: { + uid: "abcdefghijklmnopqrstuvwxyz", + email: "user@gmail.com", + email_verified: true, + display_name: TEST_NAME, + phone_number: "+11234567890", + provider_data: [ + { + provider_id: "oidc.provider", + email: "user@gmail.com", + uid: "user@gmail.com", + display_name: TEST_NAME, + }, + ], + photo_url: "https://lh3.googleusercontent.com/1234567890/photo.jpg", + tokens_valid_after_time: 1476136676, + metadata: { + last_sign_in_time: 1476235905000, + creation_time: 1476136676000, + }, + custom_claims: { + admin: true, + group_id: "group123", + }, + tenant_id: "TENANT_ID", + }, + oauth_id_token: "ID_TOKEN", + oauth_access_token: "ACCESS_TOKEN", + oauth_refresh_token: "REFRESH_TOKEN", + oauth_token_secret: "OAUTH_TOKEN_SECRET", + oauth_expires_in: 3600, + raw_user_info: JSON.stringify(rawUserInfo), + recaptcha_score: TEST_RECAPTCHA_SCORE, + }; + const context = { + locale: "en", + ipAddress: "1.2.3.4", + userAgent: "USER_AGENT", + eventId: "EVENT_ID", + eventType: "providers/cloud.auth/eventTypes/user.beforeCreate:oidc.provider", + emailType: undefined, + smsType: undefined, + authType: "USER", + resource: { + service: "identitytoolkit.googleapis.com", + name: "projects/project-id/tenants/TENANT_ID", + }, + timestamp: new Date(1000).toUTCString(), + additionalUserInfo: { + username: undefined, + providerId: "oidc.provider", + profile: rawUserInfo, + isNewUser: true, + recaptchaScore: TEST_RECAPTCHA_SCORE, + email: undefined, + phoneNumber: undefined, + }, + credential: { + claims: undefined, + accessToken: "ACCESS_TOKEN", + expirationTime: new Date(time + 3600 * 1000).toUTCString(), + idToken: "ID_TOKEN", + providerId: "oidc.provider", + refreshToken: "REFRESH_TOKEN", + secret: "OAUTH_TOKEN_SECRET", + signInMethod: "oidc.provider", + }, + params: {}, + }; + + expect(identity.parseAuthEventContext(decodedJwt, "project-id", time)).to.deep.equal(context); + }); + + it("should parse a beforeSendEmail event", () => { + const time = now.getTime(); + const decodedJwt = { + iss: "https://securetoken.google.com/project_id", + aud: "https://us-east1-project_id.cloudfunctions.net/function-1", + iat: 1, + exp: 60 * 60 + 1, + event_id: "EVENT_ID", + event_type: "beforeSendEmail", + user_agent: "USER_AGENT", + ip_address: "1.2.3.4", + locale: "en", + recaptcha_score: TEST_RECAPTCHA_SCORE, + email_type: "RESET_PASSWORD", + email: "johndoe@gmail.com", + }; + const context = { + locale: "en", + ipAddress: "1.2.3.4", + userAgent: "USER_AGENT", + eventId: "EVENT_ID", + eventType: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + emailType: "RESET_PASSWORD", + smsType: undefined, + authType: "UNAUTHENTICATED", + resource: { + service: "identitytoolkit.googleapis.com", + name: "projects/project-id", + }, + timestamp: new Date(1000).toUTCString(), + additionalUserInfo: { + isNewUser: false, + profile: undefined, + providerId: 
undefined, + username: undefined, + recaptchaScore: TEST_RECAPTCHA_SCORE, + email: "johndoe@gmail.com", + phoneNumber: undefined, + }, + credential: null, + params: {}, + }; + + expect(identity.parseAuthEventContext(decodedJwt, "project-id", time)).to.deep.equal(context); + }); + + it("should parse a beforeSendSms event", () => { + const time = now.getTime(); + const decodedJwt = { + iss: "https://securetoken.google.com/project_id", + aud: "https://us-east1-project_id.cloudfunctions.net/function-1", + iat: 1, + exp: 60 * 60 + 1, + event_id: "EVENT_ID", + event_type: "beforeSendSms", + user_agent: "USER_AGENT", + ip_address: "1.2.3.4", + locale: "en", + recaptcha_score: TEST_RECAPTCHA_SCORE, + sms_type: "SIGN_IN_OR_SIGN_UP", + phone_number: "+11234567890", + }; + const context = { + locale: "en", + ipAddress: "1.2.3.4", + userAgent: "USER_AGENT", + eventId: "EVENT_ID", + eventType: "providers/cloud.auth/eventTypes/user.beforeSendSms", + emailType: undefined, + smsType: "SIGN_IN_OR_SIGN_UP", + authType: "UNAUTHENTICATED", + resource: { + service: "identitytoolkit.googleapis.com", + name: "projects/project-id", + }, + timestamp: new Date(1000).toUTCString(), + additionalUserInfo: { + isNewUser: false, + profile: undefined, + providerId: undefined, + username: undefined, + recaptchaScore: TEST_RECAPTCHA_SCORE, + email: undefined, + phoneNumber: "+11234567890", + }, + credential: null, + params: {}, + }; + + expect(identity.parseAuthEventContext(decodedJwt, "project-id", time)).to.deep.equal(context); + }); + }); + + describe("validateAuthResponse", () => { + it("should not throw on undefined request", () => { + expect(() => identity.validateAuthResponse("event", undefined)).to.not.throw; + }); + + it("should throw an error if customClaims have a blocked claim", () => { + expect(() => + identity.validateAuthResponse("beforeCreate", { + customClaims: { acr: "something" }, + }) + ).to.throw('The customClaims claims "acr" are reserved and cannot be specified.'); + }); + + it("should throw an error if customClaims size is too big", () => { + const str = "x".repeat(1000); + + expect(() => + identity.validateAuthResponse("beforeCreate", { + customClaims: { idk: str }, + }) + ).to.throw("The customClaims payload should not exceed 1000 characters."); + }); + + it("should throw an error if sessionClaims have a blocked claim", () => { + expect(() => + identity.validateAuthResponse("beforeSignIn", { + sessionClaims: { acr: "something" }, + }) + ).to.throw('The sessionClaims claims "acr" are reserved and cannot be specified.'); + }); + + it("should throw an error if sessionClaims size is too big", () => { + const str = "x".repeat(1000); + + expect(() => + identity.validateAuthResponse("beforeSignIn", { + sessionClaims: { idk: str }, + }) + ).to.throw("The sessionClaims payload should not exceed 1000 characters."); + }); + + it("should throw an error if the combined customClaims & sessionClaims size is too big", () => { + const str = "x".repeat(501); + + expect(() => + identity.validateAuthResponse("beforeSignIn", { + customClaims: { cc: str }, + sessionClaims: { sc: str }, + }) + ).to.throw( + "The customClaims and sessionClaims payloads should not exceed 1000 characters combined." 
+ ); + }); + }); + + describe("getUpdateMask", () => { + it("should return empty string on undefined response", () => { + expect(identity.getUpdateMask()).to.eq(""); + }); + + it("should return the right claims on a response", () => { + const response = { + displayName: "john", + disabled: false, + emailVerified: true, + photoURL: "google.com", + customClaims: { + claim1: "abc", + }, + sessionClaims: { + claim2: "def", + }, + }; + + expect(identity.getUpdateMask(response)).to.eq( + "displayName,disabled,emailVerified,photoURL,customClaims,sessionClaims" + ); + }); + }); + + describe("generateResponsePayload", () => { + const DISPLAY_NAME_FIELD = "displayName"; + const TEST_RESPONSE = { + displayName: TEST_NAME, + recaptchaActionOverride: BLOCK, + } as identity.BeforeCreateResponse; + + const EXPECT_PAYLOAD = { + userRecord: { displayName: TEST_NAME, updateMask: DISPLAY_NAME_FIELD }, + recaptchaActionOverride: BLOCK, + }; + + const TEST_RESPONSE_RECAPTCHA_ALLOW = { + recaptchaActionOverride: ALLOW, + } as identity.BeforeCreateResponse; + + const EXPECT_PAYLOAD_RECAPTCHA_ALLOW = { + recaptchaActionOverride: ALLOW, + }; + + const TEST_RESPONSE_RECAPTCHA_UNDEFINED = { + displayName: TEST_NAME, + } as identity.BeforeSignInResponse; + + const EXPECT_PAYLOAD_UNDEFINED = { + userRecord: { displayName: TEST_NAME, updateMask: DISPLAY_NAME_FIELD }, + }; + it("should return empty object on undefined response", () => { + expect(identity.generateResponsePayload()).to.eql({}); + }); + + it("should exclude recaptchaActionOverride field from updateMask", () => { + expect(identity.generateResponsePayload(TEST_RESPONSE)).to.deep.equal(EXPECT_PAYLOAD); + }); + + it("should return recaptchaActionOverride when it is true on response", () => { + expect(identity.generateResponsePayload(TEST_RESPONSE_RECAPTCHA_ALLOW)).to.deep.equal( + EXPECT_PAYLOAD_RECAPTCHA_ALLOW + ); + }); + + it("should not return recaptchaActionOverride if undefined", () => { + const payload = identity.generateResponsePayload(TEST_RESPONSE_RECAPTCHA_UNDEFINED); + expect(payload.hasOwnProperty("recaptchaActionOverride")).to.be.false; + expect(payload).to.deep.equal(EXPECT_PAYLOAD_UNDEFINED); + }); + }); +}); diff --git a/spec/common/providers/tasks.spec.ts b/spec/common/providers/tasks.spec.ts new file mode 100644 index 000000000..ce2497faf --- /dev/null +++ b/spec/common/providers/tasks.spec.ts @@ -0,0 +1,301 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { App, initializeApp } from "firebase-admin/app"; + +import { getApp, setApp } from "../../../src/common/app"; +import * as https from "../../../src/common/providers/https"; +import { onDispatchHandler, Request, TaskContext } from "../../../src/common/providers/tasks"; +import * as mocks from "../../fixtures/credential/key.json"; +import { generateIdToken, generateUnsignedIdToken, mockRequest } from "../../fixtures/mockrequest"; +import { checkAuthContext, runHandler } from "../../helper"; + +/** Represents a test case for a Task Queue Function */ +interface TaskTest { + // An http request, mocking a subset of https.Request. + httpRequest: any; + + // The expected format of the request passed to the handler. + expectedData: any; + + taskFunction?: (data: any, context: TaskContext) => void | Promise<void>; + + taskFunction2?: (request: Request) => void | Promise<void>; + + // The expected shape of the http response returned to the callable SDK. + expectedStatus: number; +} + +// Runs a TaskTest test. +export async function runTaskTest(test: TaskTest): Promise<void> { + const taskQueueFunctionV1 = onDispatchHandler(async (data, context) => { + expect(data).to.deep.equal(test.expectedData); + if (test.taskFunction) { + await test.taskFunction(data, context); + } + }); + + const responseV1 = await runHandler(taskQueueFunctionV1, test.httpRequest); + expect(responseV1.status).to.equal(test.expectedStatus); + + const taskQueueFunctionV2 = onDispatchHandler(async (request) => { + expect(request.data).to.deep.equal(test.expectedData); + if (test.taskFunction2) { + await test.taskFunction2(request); + } + }); + + const responseV2 = await runHandler(taskQueueFunctionV2, test.httpRequest); + expect(responseV2.status).to.equal(test.expectedStatus); +} + +describe("onEnqueueHandler", () => { + let app: App; + + function mockEnqueueRequest( + data: unknown, + contentType = "application/json", + context: { authorization?: string } = { authorization: "Bearer abc" }, + headers: Record<string, string> = {} + ): ReturnType<typeof mockRequest> { + return mockRequest(data, contentType, context, headers); + } + + before(() => { + const credential = { + getAccessToken: () => { + return Promise.resolve({ + expires_in: 1000, + access_token: "fake", + }); + }, + getCertificate: () => { + return { + projectId: "aProjectId", + }; + }, + }; + app = initializeApp( + { + projectId: "aProjectId", + credential, + }, + "tq-test-app" + ); + setApp(app); + }); + + after(() => { + setApp(undefined); + }); + + it("should handle success", () => { + return runTaskTest({ + httpRequest: mockEnqueueRequest({ foo: "bar" }), + expectedData: { foo: "bar" }, + expectedStatus: 204, + }); + }); + + it("should reject bad method", () => { + const req = mockRequest(null); + req.method = "GET"; + return runTaskTest({ + httpRequest: req, + expectedData: null, + expectedStatus: 400, + }); + }); + + it("should ignore charset", () => { + return runTaskTest({ + httpRequest: mockEnqueueRequest(null, "application/json; charset=utf-8"), + expectedData: null, + expectedStatus: 204, + }); + }); + + it("should reject bad content type", () => { + return runTaskTest({ + httpRequest: mockEnqueueRequest(null, "text/plain"), + expectedData: null, + expectedStatus: 400, + }); + }); + + it("should 
reject extra body fields", () => { + const req = mockEnqueueRequest(null); + req.body.extra = "bad"; + return runTaskTest({ + httpRequest: req, + expectedData: null, + expectedStatus: 400, + }); + }); + + it("should handle unhandled error", () => { + return runTaskTest({ + httpRequest: mockEnqueueRequest(null), + expectedData: null, + taskFunction: () => { + throw new Error(`ceci n'est pas une error`); + }, + taskFunction2: () => { + throw new Error(`cece n'est pas une error`); + }, + expectedStatus: 500, + }); + }); + + it("should handle unknown error status", () => { + return runTaskTest({ + httpRequest: mockEnqueueRequest(null), + expectedData: null, + taskFunction: () => { + throw new https.HttpsError("THIS_IS_NOT_VALID" as any, "nope"); + }, + taskFunction2: () => { + throw new https.HttpsError("THIS_IS_NOT_VALID" as any, "nope"); + }, + expectedStatus: 500, + }); + }); + + it("should handle well-formed error", () => { + return runTaskTest({ + httpRequest: mockEnqueueRequest(null), + expectedData: null, + taskFunction: () => { + throw new https.HttpsError("not-found", "i am error"); + }, + taskFunction2: () => { + throw new https.HttpsError("not-found", "i am error"); + }, + expectedStatus: 404, + }); + }); + + it("should populate context with values from header", () => { + const headers = { + "x-cloudtasks-queuename": "x", + "x-cloudtasks-taskname": "x", + "x-cloudtasks-taskretrycount": "1", + "x-cloudtasks-taskexecutioncount": "1", + "x-cloudtasks-tasketa": "timestamp", + "x-cloudtasks-taskpreviousresponse": "400", + "x-cloudtasks-taskretryreason": "something broke", + }; + const expectedContext = { + queueName: "x", + id: "x", + retryCount: 1, + executionCount: 1, + scheduledTime: "timestamp", + previousResponse: 400, + retryReason: "something broke", + }; + + const projectId = getApp().options.projectId; + const idToken = generateIdToken(projectId); + return runTaskTest({ + httpRequest: mockEnqueueRequest( + {}, + "application/json", + { authorization: "Bearer " + idToken }, + headers + ), + expectedData: {}, + taskFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + expect(context).to.include(expectedContext); + return null; + }, + taskFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + expect(request).to.include(expectedContext); + return null; + }, + expectedStatus: 204, + }); + }); + + it("should handle auth", async () => { + const projectId = getApp().options.projectId; + const idToken = generateIdToken(projectId); + await runTaskTest({ + httpRequest: mockEnqueueRequest(null, "application/json", { + authorization: "Bearer " + idToken, + }), + expectedData: null, + taskFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + return null; + }, + taskFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + return null; + }, + expectedStatus: 204, + }); + }); + + it("should accept unsigned auth too", async () => { + const projectId = getApp().options.projectId; + const idToken = generateUnsignedIdToken(projectId); + await runTaskTest({ + httpRequest: mockEnqueueRequest(null, "application/json", { + authorization: "Bearer " + idToken, + }), + expectedData: null, + taskFunction: (data, context) => { + checkAuthContext(context, projectId, mocks.user_id); + return null; + }, + taskFunction2: (request) => { + checkAuthContext(request, projectId, mocks.user_id); + return null; + }, + expectedStatus: 204, + }); + }); + + it("should skip auth in emulated 
environment", async () => { + const restore = process.env.FUNCTIONS_EMULATOR; + process.env.FUNCTIONS_EMULATOR = "true"; + + await runTaskTest({ + httpRequest: mockEnqueueRequest(null, "application/json", {}), + expectedData: null, + taskFunction: (data, context) => { + expect(context.auth).to.be.undefined; + return null; + }, + taskFunction2: (request) => { + expect(request.auth).to.be.undefined; + }, + expectedStatus: 204, + }); + + process.env.FUNCTIONS_EMULATOR = restore; + }); +}); diff --git a/spec/common/trace.spec.ts b/spec/common/trace.spec.ts new file mode 100644 index 000000000..9f30d3f82 --- /dev/null +++ b/spec/common/trace.spec.ts @@ -0,0 +1,103 @@ +import { expect } from "chai"; +import { extractTraceContext } from "../../src/common/trace"; + +describe("extractTraceContext", () => { + it("returns undefined given object without trace properties", () => { + expect(extractTraceContext({ foo: "bar" })).to.be.undefined; + }); + + describe("traceparent", () => { + it("extracts trace context with sampling on", () => { + expect( + extractTraceContext({ + traceparent: "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01", + }) + ).to.deep.equal({ + version: "00", + traceId: "0af7651916cd43dd8448eb211c80319c", + parentId: "b7ad6b7169203331", + sample: true, + }); + }); + + it("extracts trace context with sampling off", () => { + expect( + extractTraceContext({ + traceparent: "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00", + }) + ).to.deep.equal({ + version: "00", + traceId: "0af7651916cd43dd8448eb211c80319c", + parentId: "b7ad6b7169203331", + sample: false, + }); + }); + + it("returns undefined given invalid trace id", () => { + expect(extractTraceContext({ traceparent: "00-0af7651916cd43dd8448eb211c80319c-ABCDEFG-00" })) + .to.be.undefined; + }); + }); + + describe("X-Cloud-Trace-Context", () => { + it("extracts trace context with sampling on", () => { + expect( + extractTraceContext({ + ["X-Cloud-Trace-Context"]: "105445aa7843bc8bf206b12000100000/2450465917091935019;o=1", + }) + ).to.deep.equal({ + version: "00", + traceId: "105445aa7843bc8bf206b12000100000", + parentId: "2201cdc4ba777400", + sample: true, + }); + }); + + it("extracts trace context with sampling on indicated w/ o=3", () => { + expect( + extractTraceContext({ + ["X-Cloud-Trace-Context"]: "105445aa7843bc8bf206b12000100000/2450465917091935019;o=3", + }) + ).to.deep.equal({ + version: "00", + traceId: "105445aa7843bc8bf206b12000100000", + parentId: "2201cdc4ba777400", + sample: true, + }); + }); + + it("extracts trace context with sampling off", () => { + expect( + extractTraceContext({ + ["X-Cloud-Trace-Context"]: "105445aa7843bc8bf206b12000100000/2450465917091935019;o=0", + }) + ).to.deep.equal({ + version: "00", + traceId: "105445aa7843bc8bf206b12000100000", + parentId: "2201cdc4ba777400", + sample: false, + }); + }); + + it("extracts trace context with no sampling info", () => { + expect( + extractTraceContext({ + ["X-Cloud-Trace-Context"]: "105445aa7843bc8bf206b12000100000/2450465917091935019", + }) + ).to.deep.equal({ + version: "00", + traceId: "105445aa7843bc8bf206b12000100000", + parentId: "2201cdc4ba777400", + sample: false, + }); + }); + + it("returns undefined given invalid parentId", () => { + expect( + extractTraceContext({ + ["X-Cloud-Trace-Context"]: "105445aa7843bc8bf206b12000100000/abcedf;o=0", + }) + ).to.be.undefined; + }); + }); +}); diff --git a/spec/common/utilities/path-pattern.spec.ts b/spec/common/utilities/path-pattern.spec.ts new file mode 100644 index 000000000..fc19f3955 --- 
/dev/null +++ b/spec/common/utilities/path-pattern.spec.ts @@ -0,0 +1,129 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. +import { expect } from "chai"; +import * as pathPattern from "../../../src/common/utilities/path-pattern"; + +describe("path-pattern", () => { + describe("trimParam", () => { + it("should trim a capture param without equals", () => { + expect(pathPattern.trimParam("{something}")).to.equal("something"); + }); + + it("should trim a capture param with equals", () => { + expect(pathPattern.trimParam("{something=*}")).to.equal("something"); + }); + }); + + describe("extractMatches", () => { + it("should parse without multi segment", () => { + const pp = new pathPattern.PathPattern("{a}/something/else/{b}/end/{c}"); + + expect(pp.extractMatches("match_a/something/else/match_b/end/match_c")).to.deep.equal({ + a: "match_a", + b: "match_b", + c: "match_c", + }); + }); + + it("should parse multi segment with params after", () => { + const pp = new pathPattern.PathPattern("something/**/else/{a}/hello/{b}/world"); + + expect(pp.extractMatches("something/is/a/thing/else/nothing/hello/user/world")).to.deep.equal( + { + a: "nothing", + b: "user", + } + ); + }); + + it("should parse multi segment param with params after", () => { + const pp = new pathPattern.PathPattern("something/{path=**}/else/{a}/hello/{b}/world"); + + expect(pp.extractMatches("something/is/a/thing/else/nothing/hello/user/world")).to.deep.equal( + { + path: "is/a/thing", + a: "nothing", + b: "user", + } + ); + }); + + it("should parse multi segment with params before", () => { + const pp = new pathPattern.PathPattern("{a}/something/{b}/**/end"); + + expect( + pp.extractMatches("match_a/something/match_b/thing/else/nothing/hello/user/end") + ).to.deep.equal({ + a: "match_a", + b: "match_b", + }); + }); + + it("should parse multi segment param with params before", () => { + const pp = new pathPattern.PathPattern("{a}/something/{b}/{path=**}/end"); + + expect( + pp.extractMatches("match_a/something/match_b/thing/else/nothing/hello/user/end") + ).to.deep.equal({ + a: "match_a", + b: "match_b", + path: "thing/else/nothing/hello/user", + }); + }); + + it("should parse multi segment with params before and after", () => { + const pp = new pathPattern.PathPattern("{a}/something/**/{b}/end"); + + expect( + pp.extractMatches("match_a/something/thing/else/nothing/hello/user/match_b/end") + ).to.deep.equal({ + a: "match_a", + b: "match_b", 
+ }); + }); + + it("should parse multi segment param with params before", () => { + const pp = new pathPattern.PathPattern("{a}/something/{path=**}/{b}/end"); + + expect( + pp.extractMatches("match_a/something/thing/else/nothing/hello/user/match_b/end") + ).to.deep.equal({ + a: "match_a", + b: "match_b", + path: "thing/else/nothing/hello/user", + }); + }); + + // handle an instance param + it("should parse an instance", () => { + const pp = new pathPattern.PathPattern("{a}-something-{b}-else-{c}"); + + expect(pp.extractMatches("match_a-something-match_b-else-match_c")).to.deep.equal({}); + + const anotherPP = new pathPattern.PathPattern("{a}"); + + expect(anotherPP.extractMatches("match_a")).to.deep.equal({ + a: "match_a", + }); + }); + }); +}); diff --git a/spec/common/utilities/path.spec.ts b/spec/common/utilities/path.spec.ts new file mode 100644 index 000000000..3fcf65c3a --- /dev/null +++ b/spec/common/utilities/path.spec.ts @@ -0,0 +1,24 @@ +import { expect } from "chai"; +import { normalizePath, pathParts } from "../../../src/common/utilities/path"; + +describe("utilities", () => { + describe("path", () => { + describe("#normalizePath", () => { + it("should strip leading and trailing slash", () => { + expect(normalizePath("/my/path/is/{rad}/")).to.eq("my/path/is/{rad}"); + }); + }); + + describe("#pathParts", () => { + it("should turn a path into an array of strings", () => { + expect(pathParts("/foo/bar/baz")).to.deep.equal(["foo", "bar", "baz"]); + }); + + it("should turn a root path, empty string, or null path into an empty array", () => { + expect(pathParts("")).to.deep.equal([]); + expect(pathParts(null)).to.deep.equal([]); + expect(pathParts("/")).to.deep.equal([]); + }); + }); + }); +}); diff --git a/spec/config.spec.ts b/spec/config.spec.ts deleted file mode 100644 index aae3ea94d..000000000 --- a/spec/config.spec.ts +++ /dev/null @@ -1,113 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as mockRequire from 'mock-require'; -import { expect } from 'chai'; -import { config, firebaseConfig } from '../src/config'; - -describe('config()', () => { - - afterEach(() => { - mockRequire.stopAll(); - delete config.singleton; - delete process.env.FIREBASE_CONFIG; - delete process.env.FIREBASE_PROJECT; - delete process.env.CLOUD_RUNTIME_CONFIG; - }); - - it('loads config values from .runtimeconfig.json', () => { - mockRequire('../../../.runtimeconfig.json', { foo: 'bar', firebase: {} }); - let loaded = config(); - expect(loaded).to.not.have.property('firebase'); - expect(loaded).to.have.property('foo','bar'); - }); - - it('does not provide firebase config if .runtimeconfig.json not invalid', () => { - mockRequire('../../../.runtimeconfig.json', 'does-not-exist'); - expect(firebaseConfig()).to.be.null; - }); - - it('does not provide firebase config if .ruuntimeconfig.json has no firebase property', () => { - mockRequire('../../../.runtimeconfig.json', {}); - expect(firebaseConfig()).to.be.null; - }); - - it('loads Firebase configs from FIREBASE_PROJECT env variable', () => { - process.env.FIREBASE_PROJECT = JSON.stringify({ - databaseURL: 'foo@firebaseio.com', - }); - expect(firebaseConfig()).to.have.property('databaseURL', 'foo@firebaseio.com'); - }); - - it('loads Firebase configs from FIREBASE_CONFIG env variable', () => { - process.env.FIREBASE_CONFIG = JSON.stringify({ - databaseURL: 'foo@firebaseio.com', - }); - expect(firebaseConfig()).to.have.property('databaseURL', 'foo@firebaseio.com'); - }); - - it('prefers FIREBASE_CONFIG over FIREBASE_PROJECT', () => { - process.env.FIREBASE_CONFIG = JSON.stringify({ - databaseURL: 'firebase_config', - }); - process.env.FIREBASE_PROJECT = JSON.stringify({ - databaseURL: 'firebase_project', - }); - expect(firebaseConfig()).to.have.property('databaseURL', 'firebase_config'); - }); - - it('behaves well when both FIREBASE_PROJECT and .runtimeconfig.json present', () => { - process.env.FIREBASE_PROJECT = JSON.stringify({ - databaseURL: 'foo@firebaseio.com', - }); - mockRequire('../../../.runtimeconfig.json', { - firebase: { - databaseURL: 'foo@firebaseio.com', - }, - foo: 'bar', - }); - expect(firebaseConfig()).to.have.property('databaseURL', 'foo@firebaseio.com'); - expect(config()).to.have.property('foo', 'bar'); - }); - - it('accepts alternative locations for config file', () => { - process.env.CLOUD_RUNTIME_CONFIG = 'another.json'; - mockRequire('another.json', { foo: 'bar', firebase: {} }); - expect(firebaseConfig()).to.not.be.null; - expect(config()).to.have.property('foo','bar'); - }); - - it('accepts full JSON in env.CLOUD_RUNTIME_CONFIG', () => { - process.env.CLOUD_RUNTIME_CONFIG = JSON.stringify({foo: 'bar', firebase:{} }); - expect(firebaseConfig()).to.not.be.null; - expect(config()).to.have.property('foo', 'bar'); - }); - - it('behaves well when both env.CLOUD_RUNTIME_CONFIG and env.FIREBASE_PROJECT are set', () => { - process.env.CLOUD_RUNTIME_CONFIG = JSON.stringify({ foo: 'bar' }); - process.env.FIREBASE_PROJECT = JSON.stringify({ - databaseURL: 'foo@firebaseio.com', - }); - expect(firebaseConfig()).to.have.property('databaseURL', 'foo@firebaseio.com'); - expect(config()).to.have.property('foo', 'bar'); - }); -}); diff --git a/spec/fixtures.ts b/spec/fixtures.ts new file mode 100644 index 000000000..ddc2084fd --- /dev/null +++ b/spec/fixtures.ts @@ -0,0 +1,64 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this 
software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. +import { ManifestEndpoint } from "../src/runtime/manifest"; +import { RESET_VALUE } from "../src/common/options"; + +export const MINIMAL_V2_ENDPOINT: ManifestEndpoint = { + availableMemoryMb: RESET_VALUE, + concurrency: RESET_VALUE, + ingressSettings: RESET_VALUE, + maxInstances: RESET_VALUE, + minInstances: RESET_VALUE, + serviceAccountEmail: RESET_VALUE, + timeoutSeconds: RESET_VALUE, + vpc: RESET_VALUE, +}; + +export const MINIMAL_V1_ENDPOINT: ManifestEndpoint = { + availableMemoryMb: RESET_VALUE, + ingressSettings: RESET_VALUE, + maxInstances: RESET_VALUE, + minInstances: RESET_VALUE, + serviceAccountEmail: RESET_VALUE, + timeoutSeconds: RESET_VALUE, + vpc: RESET_VALUE, +}; + +export const FULL_ENDPOINT: ManifestEndpoint = { + region: ["us-west1"], + availableMemoryMb: 512, + timeoutSeconds: 60, + minInstances: 1, + maxInstances: 3, + concurrency: 20, + vpc: { + connector: "aConnector", + egressSettings: "ALL_TRAFFIC", + }, + serviceAccountEmail: "root@", + ingressSettings: "ALLOW_ALL", + cpu: "gcf_gen1", + labels: { + hello: "world", + }, + secretEnvironmentVariables: [{ key: "MY_SECRET" }], +}; diff --git a/spec/fixtures/credential/jwk.json b/spec/fixtures/credential/jwk.json new file mode 100644 index 000000000..cde44767e --- /dev/null +++ b/spec/fixtures/credential/jwk.json @@ -0,0 +1,14 @@ +{ + "p": "9cTVRzGXbDfhIMQW9gXtWveDW0u_Hvwnbjx7TRPgSfawZ0MjgKfSbnyHTDXiqM1ifcN_Nk58KJ-PG9eZ7V7_mfTUnPv2puDaecn-kgHobnTJMoBR9hpzyyMpyNJuMvX4kqE7Qh8iFMBK_-p8ICiW15gK5WykswIKfIOkUZc52XM", + "kty": "RSA", + "q": "pYdUNL244sCoc4XrONKlu787AiHrjFFLHdTjoFLbvxSpszXM8iSjoiFAM_MCF-uWks2iBVDw9wlG4MB7MfNf_fD0i1wqyknSOtfMxknU7D4eU_Sp6tI99Jl8f_GAzODK__k_0MpqqXgZmJbUvYuIXMiha-5lddz8ENa4pYpbr7M", + "d": "MpkXqjmjvzwfmlq3o0uZAXjeeAnBlYQSNaSllBWKepgPjg4FxFIt_BlXex1NeP0npNy_oCgaM_x7NiALaaPhwPK52lhYThc-xomCic1KDkyPecODTPXi4Iw94Q_gp442SYMWz2ZktS-2DgXc3599fGHkY80u0rHNSO8ptdk8SUDUIZ82ZQ3pBhClF_uY3c1jZLuqVgCwKksInZmNPnv3ge088wmQC26t0Ph5u1HU6lISgaqZ8ol23iNWJPf4UEi8Twy1a73nphQS-y1yK9UC3c5Knk-WI2TMmjlxqC02ZjKqnRDxElTj9kpodasPRHRV_KJI8rTaStgxd7peMFODzQ", + "e": "AQAB", + "use": "sig", + "kid": "a12KBE", + "qi": "aJCrZVWeOjxYmMBTTI7aJhxcvvfS3I5Q7wwN4Oyb1rJZ4fgGYjDohlzeZz_3fNantPAgcDbzJfa3XS327sHJGaAVqvDugZUgyHeLZGzXGs-_mlL72wzcfvTa1C9_lIndLNZJle5_mg3xJAqRKV0s7kymSdYt0wL5fDaqo5SDNqQ", + "dp": "haBk2hWzoApt5HPZjCDC4g_rosr3enBdPAm0fL8O1whC95JAjmYw-xPIOH6f42nwYDLYSv23chr3I4tBTRe2382HgGdav3dIMqnKOTbCWrQy5LtyVN4jEVLoGCGZ-ylT4t25K4Vj8WZwIN8saAvJoCUx33YHwrCcZQDqadZQhNM", + "alg": "RS256", + "dq": 
"j6NdeN7hnzMbehPNyGNSmhcZd4JDymGI03w3gpokQi4GDJM1IzKUJE7CTdIkEOnIod97Jy3TzCrqrIGa5f-RXuVG79-s6hkhKxq0gaTz9YT6AFShVjnWtXizRrskz6SJw5JgxCfCYwjq_TR1q313eTxIh0Y6GQsIWPxbApuLcG0", + "n": "nunJGpOcPvVsP3q-NLgf3H6OycPhnXUxywMR2_H_JJP7BUIDSsYcOGBTFe7OphHYfyb1Gs14yAER243swndpNbQkuDJhj9a9kK6dJZmPGmvCySk_E5URj6MimZg1MBbwhsVAbRp2uerESZuoRrfdTdV87E3pGyg6Irl0IXRjy5w9SsFjjIi7E-Qxpf3TcNNjfVRLj9V2bSzmS7hlsPKBhDon0tWecuNKoNNMiGI46mz_MSUa2y1lPV6Cqhf1su_TRd7N7u9eP7xWArr7wqtqHiFTZ3qp1xoA_dr_xv_Ao2kBtohZiAFLV-PQShprSN5fafztRZFkSEF0m2tUkvmoaQ" +} diff --git a/spec/fixtures/credential/key.d.ts b/spec/fixtures/credential/key.d.ts index fc5556fa8..1c1e4dbe7 100644 --- a/spec/fixtures/credential/key.d.ts +++ b/spec/fixtures/credential/key.d.ts @@ -1,5 +1,5 @@ /* tslint:disable */ -declare module "*key.json" { +declare module '*key.json' { const type: string; const user_id: string; const project_id: string; diff --git a/spec/fixtures/extsdk/local/index.d.ts b/spec/fixtures/extsdk/local/index.d.ts new file mode 100644 index 000000000..ed3a39c74 --- /dev/null +++ b/spec/fixtures/extsdk/local/index.d.ts @@ -0,0 +1,37 @@ +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester SDK for backfill@0.0.2 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for Local extension. + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. + */ +export type DoBackfillParam = "True" | "False"; +export type LocationParam = "us-central1" | "us-east1" | "us-east4" | "europe-west1" | "europe-west2" | "europe-west3" | "asia-east2" | "asia-northeast1"; +/** + * Parameters for backfill@0.0.2 extension + */ +export interface BackfillParams { + /** + * Do a backfill + */ + DO_BACKFILL: DoBackfillParam; + /** + * Cloud Functions location + */ + LOCATION: LocationParam; +} +export declare function backfill(instanceId: string, params: BackfillParams): Backfill; +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester + * A tester for the TaskQueue/LCE/RuntimeStatus project + */ +export declare class Backfill { + private instanceId; + private params; + readonly FIREBASE_EXTENSION_LOCAL_PATH = "./functions/generated/extensions/local/backfill/0.0.2/src"; + constructor(instanceId: string, params: BackfillParams); + getInstanceId(): string; + getParams(): BackfillParams; +} \ No newline at end of file diff --git a/spec/fixtures/extsdk/local/index.js b/spec/fixtures/extsdk/local/index.js new file mode 100644 index 000000000..f1f9cce55 --- /dev/null +++ b/spec/fixtures/extsdk/local/index.js @@ -0,0 +1,30 @@ +"use strict"; +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester SDK for extensions-try-backfill3@0.0.2 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for Local extension. + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Backfill = exports.backfill = void 0; +function backfill(instanceId, params) { + return new Backfill(instanceId, params); +} +exports.backfill = backfill; +/** + * TaskQueue/LifecycleEvent/RuntimeStatus Tester + * A tester for the TaskQueue/LCE/RuntimeStatus project + */ +class Backfill { + constructor(instanceId, params) { + this.instanceId = instanceId; + this.params = params; + this.FIREBASE_EXTENSION_LOCAL_PATH = "./functions/generated/extensions/local/backfill/0.0.2/src"; + } + getInstanceId() { return this.instanceId; } + getParams() { return this.params; } +} +exports.Backfill = Backfill; diff --git a/spec/fixtures/extsdk/local/package.json b/spec/fixtures/extsdk/local/package.json new file mode 100644 index 000000000..700806b3e --- /dev/null +++ b/spec/fixtures/extsdk/local/package.json @@ -0,0 +1,4 @@ +{ + "name": "@firebase-extensions/local-backfill-sdk", + "main": "./index.js" + } \ No newline at end of file diff --git a/spec/fixtures/extsdk/translate/index.d.ts b/spec/fixtures/extsdk/translate/index.d.ts new file mode 100644 index 000000000..8e5cee195 --- /dev/null +++ b/spec/fixtures/extsdk/translate/index.d.ts @@ -0,0 +1,122 @@ +/** + * Translate Text in Firestore SDK for firestore-translate-text@0.1.18 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for firestore-translate-text@0.1.18" + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. + */ +import { CloudEvent } from "../../../../v2"; +import { EventarcTriggerOptions } from "../../../../v2/eventarc"; +export type EventCallback = (event: CloudEvent<unknown>) => unknown | Promise<unknown>; +export type SimpleEventarcTriggerOptions = Omit<EventarcTriggerOptions, "eventType">; +export type EventArcRegionType = "us-central1" | "us-west1" | "europe-west4" | "asia-northeast1"; +export type SystemFunctionVpcConnectorEgressSettingsParam = "VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED" | "PRIVATE_RANGES_ONLY" | "ALL_TRAFFIC"; +export type SystemFunctionIngressSettingsParam = "ALLOW_ALL" | "ALLOW_INTERNAL_ONLY" | "ALLOW_INTERNAL_AND_GCLB"; +export type SystemFunctionLocationParam = "us-central1" | "us-east1" | "us-east4" | "us-west1" | "us-west2" | "us-west3" | "us-west4" | "europe-central2" | "europe-west1" | "europe-west2" | "europe-west3" | "europe-west6" | "asia-east1" | "asia-east2" | "asia-northeast1" | "asia-northeast2" | "asia-northeast3" | "asia-south1" | "asia-southeast1" | "asia-southeast2" | "northamerica-northeast1" | "southamerica-east1" | "australia-southeast1"; +export type SystemFunctionMemoryParam = "128" | "256" | "512" | "1024" | "2048" | "4096" | "8192"; +/** + * Parameters for firestore-translate-text@0.1.18 extension + */ +export interface FirestoreTranslateTextParams { + /** + * Target languages for translations, as a comma-separated list + */ + LANGUAGES: string; + /** + * Collection path + */ + COLLECTION_PATH: string; + /** + * Input field name + */ + INPUT_FIELD_NAME: string; + /** + * Translations output field name + */ + OUTPUT_FIELD_NAME: string; + /** + * Languages field name + */ + LANGUAGES_FIELD_NAME?: string; + /** + * Event Arc Region + */ + _EVENT_ARC_REGION?: EventArcRegionType; + /** + * Function timeout seconds + */ + _FUNCTION_TIMEOUT_SECONDS?: string; + /** + * VPC Connector + */ + _FUNCTION_VPC_CONNECTOR?: string; + /** + * VPC Connector Egress settings + */ + _FUNCTION_VPC_CONNECTOR_EGRESS_SETTINGS?: SystemFunctionVpcConnectorEgressSettingsParam; + /** + * Minimum 
function instances + */ + _FUNCTION_MIN_INSTANCES?: string; + /** + * Maximum function instances + */ + _FUNCTION_MAX_INSTANCES?: string; + /** + * Function ingress settings + */ + _FUNCTION_INGRESS_SETTINGS?: SystemFunctionIngressSettingsParam; + /** + * Function labels + */ + _FUNCTION_LABELS?: string; + /** + * KMS key name + */ + _FUNCTION_KMS_KEY_NAME?: string; + /** + * Docker repository + */ + _FUNCTION_DOCKER_REPOSITORY?: string; + /** + * Cloud Functions location + */ + _FUNCTION_LOCATION: SystemFunctionLocationParam; + /** + * Function memory + */ + _FUNCTION_MEMORY?: SystemFunctionMemoryParam; +} +export declare function firestoreTranslateText(instanceId: string, params: FirestoreTranslateTextParams): FirestoreTranslateText; +/** + * Translate Text in Firestore + * Translates strings written to a Cloud Firestore collection into multiple languages (uses Cloud Translation API). + */ +export declare class FirestoreTranslateText { + private instanceId; + private params; + events: string[]; + readonly FIREBASE_EXTENSION_REFERENCE = "firebase/firestore-translate-text@0.1.18"; + readonly EXTENSION_VERSION = "0.1.18"; + constructor(instanceId: string, params: FirestoreTranslateTextParams); + getInstanceId(): string; + getParams(): FirestoreTranslateTextParams; + /** + * Occurs when a trigger has been called within the Extension, and will include data such as the context of the trigger request. + */ + onStart(callback: EventCallback, options?: SimpleEventarcTriggerOptions): import("firebase-functions/v2").CloudFunction<CloudEvent<unknown>>; + /** + * Occurs when image resizing completes successfully. The event will contain further details about specific formats and sizes. + */ + onSuccess(callback: EventCallback, options?: SimpleEventarcTriggerOptions): import("firebase-functions/v2").CloudFunction<CloudEvent<unknown>>; + /** + * Occurs when an issue has been experienced in the Extension. This will include any error data that has been included within the Error Exception. + */ + onError(callback: EventCallback, options?: SimpleEventarcTriggerOptions): import("firebase-functions/v2").CloudFunction<CloudEvent<unknown>>; + /** + * Occurs when the function is settled. Provides no customized data other than the context. + */ + onCompletion(callback: EventCallback, options?: SimpleEventarcTriggerOptions): import("firebase-functions/v2").CloudFunction<CloudEvent<unknown>>; +} \ No newline at end of file diff --git a/spec/fixtures/extsdk/translate/index.js b/spec/fixtures/extsdk/translate/index.js new file mode 100644 index 000000000..e263f582f --- /dev/null +++ b/spec/fixtures/extsdk/translate/index.js @@ -0,0 +1,61 @@ +"use strict"; +/** + * Translate Text in Firestore SDK for firestore-translate-text@0.1.18 + * + * When filing bugs or feature requests please specify: + * "Extensions SDK v1.0.0 for firestore-translate-text@0.1.18" + * https://github.com/firebase/firebase-tools/issues/new/choose + * + * GENERATED FILE. DO NOT EDIT. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FirestoreTranslateText = exports.firestoreTranslateText = void 0; +const eventarc_1 = require("../../../../src/v2/providers/eventarc"); +function firestoreTranslateText(instanceId, params) { + return new FirestoreTranslateText(instanceId, params); +} +exports.firestoreTranslateText = firestoreTranslateText; +/** + * Translate Text in Firestore + * Translates strings written to a Cloud Firestore collection into multiple languages (uses Cloud Translation API). 
+ */ +class FirestoreTranslateText { + constructor(instanceId, params) { + this.instanceId = instanceId; + this.params = params; + this.events = []; + this.FIREBASE_EXTENSION_REFERENCE = "firebase/firestore-translate-text@0.1.18"; + this.EXTENSION_VERSION = "0.1.18"; + } + getInstanceId() { return this.instanceId; } + getParams() { return this.params; } + /** + * Occurs when a trigger has been called within the Extension, and will include data such as the context of the trigger request. + */ + onStart(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onStart"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onStart", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } + /** + * Occurs when image resizing completes successfully. The event will contain further details about specific formats and sizes. + */ + onSuccess(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onSuccess"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onSuccess", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } + /** + * Occurs when an issue has been experienced in the Extension. This will include any error data that has been included within the Error Exception. + */ + onError(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onError"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onError", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } + /** + * Occurs when the function is settled. Provides no customized data other than the context. 
+ */ + onCompletion(callback, options) { + this.events.push("firebase.extensions.firestore-translate-text.v1.onCompletion"); + return (0, eventarc_1.onCustomEventPublished)(Object.assign(Object.assign({}, options), { "eventType": "firebase.extensions.firestore-translate-text.v1.onCompletion", "channel": `projects/locations/${this.params._EVENT_ARC_REGION}/channels/firebase`, "region": `${this.params._EVENT_ARC_REGION}` }), callback); + } +} +exports.FirestoreTranslateText = FirestoreTranslateText; diff --git a/spec/fixtures/extsdk/translate/package.json b/spec/fixtures/extsdk/translate/package.json new file mode 100644 index 000000000..964287a7e --- /dev/null +++ b/spec/fixtures/extsdk/translate/package.json @@ -0,0 +1,4 @@ +{ + "name": "@firebase-extensions/firebase-firestore-translate-text-sdk", + "main": "./index.js" +} \ No newline at end of file diff --git a/spec/fixtures/http.ts b/spec/fixtures/http.ts index d88a163c0..efda2a501 100644 --- a/spec/fixtures/http.ts +++ b/spec/fixtures/http.ts @@ -39,14 +39,10 @@ export function mockRCVariableFetch( data: any, token: string = 'thetoken' ): nock.Scope { - let mock: nock.Scope = nock('https://runtimeconfig.googleapis.com') - .get(`/v1beta1/projects/${projectId}/configs/firebase/variables/${varName}`); - - if (token) { - mock = mock.matchHeader('Authorization', `Bearer ${token}`); - } - - return mock.reply(200, {text: JSON.stringify(data)}); + return nock('https://runtimeconfig.googleapis.com') + .get(`/v1beta1/projects/${projectId}/configs/firebase/variables/${varName}`) + .matchHeader('Authorization', `Bearer ${token}`) + .reply(200, { text: JSON.stringify(data) }); } export function mockMetaVariableWatch( @@ -55,43 +51,54 @@ export function mockMetaVariableWatch( token: string = 'thetoken', updateTime: string = new Date().toISOString() ): nock.Scope { - let mock: nock.Scope = nock('https://runtimeconfig.googleapis.com') - .post(`/v1beta1/projects/${projectId}/configs/firebase/variables/meta:watch`); - - if (token) { - mock = mock.matchHeader('Authorization', `Bearer ${token}`); - } - - return mock.reply(200, { - updateTime, - state: 'UPDATED', - text: JSON.stringify(data), - }); + return nock('https://runtimeconfig.googleapis.com') + .post( + `/v1beta1/projects/${projectId}/configs/firebase/variables/meta:watch` + ) + .matchHeader('Authorization', `Bearer ${token}`) + .reply(200, { + updateTime, + state: 'UPDATED', + text: JSON.stringify(data), + }); } -export function mockMetaVariableWatchTimeout(projectId: string, delay: number, token?: string): nock.Scope { - let mock: nock.Scope = nock('https://runtimeconfig.googleapis.com') - .post(`/v1beta1/projects/${projectId}/configs/firebase/variables/meta:watch`); +export function mockMetaVariableWatchTimeout( + projectId: string, + delay: number, + token?: string +): nock.Scope { + let interceptor = nock('https://runtimeconfig.googleapis.com').post( + `/v1beta1/projects/${projectId}/configs/firebase/variables/meta:watch` + ); - if (token) { - mock = mock.matchHeader('Authorization', `Bearer ${token}`); + if (interceptor) { + interceptor = interceptor.matchHeader('Authorization', `Bearer ${token}`); } - return mock.delay(delay).reply(502); + return interceptor.delay(delay).reply(502); } -export function mockCreateToken(token: AccessToken = {access_token: 'aToken', expires_in: 3600}): nock.Scope { - let mock: nock.Scope = nock('https://accounts.google.com').post('/o/oauth2/token'); - return mock.reply(200, token); +export function mockCreateToken( + token: AccessToken = { access_token: 
'aToken', expires_in: 3600 } +): nock.Scope { + return nock('https://accounts.google.com') + .post('/o/oauth2/token') + .reply(200, token); } -export function mockRefreshToken(token: AccessToken = {access_token: 'aToken', expires_in: 3600}): nock.Scope { - let mock: nock.Scope = nock('https://www.googleapis.com').post('/oauth2/v4/token'); - return mock.reply(200, token); +export function mockRefreshToken( + token: AccessToken = { access_token: 'aToken', expires_in: 3600 } +): nock.Scope { + return nock('https://www.googleapis.com') + .post('/oauth2/v4/token') + .reply(200, token); } -export function mockMetadataServiceToken(token: AccessToken = {access_token: 'aToken', expires_in: 3600}): nock.Scope { - let mock: nock.Scope = nock('http://metadata.google.internal') - .get('/computeMetadata/v1beta1/instance/service-accounts/default/token'); - return mock.reply(200, token); +export function mockMetadataServiceToken( + token: AccessToken = { access_token: 'aToken', expires_in: 3600 } +): nock.Scope { + return nock('http://metadata.google.internal') + .get('/computeMetadata/v1beta1/instance/service-accounts/default/token') + .reply(200, token); } diff --git a/spec/fixtures/mockrequest.ts b/spec/fixtures/mockrequest.ts new file mode 100644 index 000000000..28759f94c --- /dev/null +++ b/spec/fixtures/mockrequest.ts @@ -0,0 +1,158 @@ +import { EventEmitter } from 'node:events'; + +import jwt from 'jsonwebtoken'; +import jwkToPem from 'jwk-to-pem'; +import nock from 'nock'; +import * as mockJWK from '../fixtures/credential/jwk.json'; +import * as mockKey from '../fixtures/credential/key.json'; + +// MockRequest mocks an https.Request. +export class MockRequest extends EventEmitter { + public method: 'POST' | 'GET' | 'OPTIONS' = 'POST'; + + constructor( + readonly body: any, + readonly headers: { [name: string]: string } + ) { + super() + } + + public header(name: string): string { + return this.headers[name.toLowerCase()]; + } } + +// Creates a mock request with the given data and content-type. +export function mockRequest( + data: any, + contentType: string = 'application/json', + context: { + authorization?: string; + instanceIdToken?: string; + appCheckToken?: string; + } = {}, + reqHeaders?: Record<string, string>, +) { + const body: any = {}; + if (typeof data !== 'undefined') { + body.data = data; + } + + const headers = { + 'content-type': contentType, + authorization: context.authorization, + 'firebase-instance-id-token': context.instanceIdToken, + 'x-firebase-appcheck': context.appCheckToken, + origin: 'example.com', + ...reqHeaders, + }; + + return new MockRequest(body, headers); +} + +export const expectedResponseHeaders = { + 'Access-Control-Allow-Origin': 'example.com', + Vary: 'Origin', +}; + +/** + * Mocks out the http request used by the firebase-admin SDK to get the key for + * verifying an id token. + */ +export function mockFetchPublicKeys(): nock.Scope { + const mockedResponse = { [mockKey.key_id]: mockKey.public_key }; + const headers = { + 'cache-control': 'public, max-age=1, must-revalidate, no-transform', + }; + + return nock('https://www.googleapis.com:443') + .get('/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com') + .reply(200, mockedResponse, headers); +} + +/** + * Generates a mocked Firebase ID token. 
+ */ +export function generateIdToken(projectId: string): string { + const claims = {}; + const options: jwt.SignOptions = { + audience: projectId, + expiresIn: 60 * 60, // 1 hour in seconds + issuer: 'https://securetoken.google.com/' + projectId, + subject: mockKey.user_id, + algorithm: 'RS256', + header: { + kid: mockKey.key_id, + alg: 'RS256', + }, + }; + return jwt.sign(claims, mockKey.private_key, options); +} + +/** + * Generates a mocked, unsigned Firebase ID token. + */ +export function generateUnsignedIdToken(projectId: string): string { + return [ + { alg: 'RS256', typ: 'JWT' }, + { aud: projectId, sub: mockKey.user_id }, + 'Invalid signature', + ] + .map((str) => JSON.stringify(str)) + .map((str) => Buffer.from(str).toString('base64')) + .join('.'); +} + +/** + * Mocks out the http request used by the firebase-admin SDK to get the jwks for + * verifying an AppCheck token. + */ +export function mockFetchAppCheckPublicJwks(): nock.Scope { + const { kty, use, alg, kid, n, e } = mockJWK; + const mockedResponse = { + keys: [{ kty, use, alg, kid, n, e }], + }; + + return nock('https://firebaseappcheck.googleapis.com:443') + .get('/v1/jwks') + .reply(200, mockedResponse); +} + +/** + * Generates a mocked AppCheck token. + */ +export function generateAppCheckToken( + projectId: string, + appId: string +): string { + const claims = {}; + const options: jwt.SignOptions = { + audience: [`projects/${projectId}`], + expiresIn: 60 * 60, // 1 hour in seconds + issuer: `https://firebaseappcheck.googleapis.com/${projectId}`, + subject: appId, + header: { + alg: 'RS256', + typ: 'JWT', + kid: mockJWK.kid, + }, + }; + return jwt.sign(claims, jwkToPem(mockJWK, { private: true }), options); +} + +/** + * Generates a mocked, unsigned AppCheck token. + */ +export function generateUnsignedAppCheckToken( + projectId: string, + appId: string +): string { + return [ + { alg: 'RS256', typ: 'JWT' }, + { aud: [`projects/${projectId}`], sub: appId }, + 'Invalid signature', + ] + .map((component) => JSON.stringify(component)) + .map((str) => Buffer.from(str).toString('base64')) + .join('.'); +} diff --git a/spec/fixtures/sources/commonjs-grouped/g1.js b/spec/fixtures/sources/commonjs-grouped/g1.js new file mode 100644 index 000000000..4ddf39aa3 --- /dev/null +++ b/spec/fixtures/sources/commonjs-grouped/g1.js @@ -0,0 +1,9 @@ +const functions = require("../../../../src/v1"); + +exports.groupedhttp = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.groupedcallable = functions.https.onCall(() => { + return "PASS"; +}); diff --git a/spec/fixtures/sources/commonjs-grouped/index.js b/spec/fixtures/sources/commonjs-grouped/index.js new file mode 100644 index 000000000..06c976a71 --- /dev/null +++ b/spec/fixtures/sources/commonjs-grouped/index.js @@ -0,0 +1,46 @@ +const functions = require("../../../../src/v1"); +const functionsv2 = require("../../../../src/v2"); +const firestoreTranslateText = require("../../extsdk/translate").firestoreTranslateText; +const backfill = require("../../extsdk/local").backfill; + + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": 
"collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); + +exports.g1 = require("./g1"); diff --git a/spec/fixtures/sources/commonjs-grouped/package.json b/spec/fixtures/sources/commonjs-grouped/package.json new file mode 100644 index 000000000..1ec99f52f --- /dev/null +++ b/spec/fixtures/sources/commonjs-grouped/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs-grouped" +} diff --git a/spec/fixtures/sources/commonjs-main/functions.js b/spec/fixtures/sources/commonjs-main/functions.js new file mode 100644 index 000000000..b33e0cb71 --- /dev/null +++ b/spec/fixtures/sources/commonjs-main/functions.js @@ -0,0 +1,43 @@ +const functions = require("../../../../src/v1"); +const functionsv2 = require("../../../../src/v2"); +const firestoreTranslateText = require("../../extsdk/translate").firestoreTranslateText; +const backfill = require("../../extsdk/local").backfill; + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); \ No newline at end of file diff --git a/spec/fixtures/sources/commonjs-main/package.json b/spec/fixtures/sources/commonjs-main/package.json new file mode 100644 index 000000000..a781259f8 --- /dev/null +++ b/spec/fixtures/sources/commonjs-main/package.json @@ -0,0 +1,4 @@ +{ + "name": "commonjs-main", + "main": "functions.js" +} diff --git a/spec/fixtures/sources/commonjs-parametrized-fields/index.js b/spec/fixtures/sources/commonjs-parametrized-fields/index.js new file mode 100644 index 000000000..c6f37a309 --- /dev/null +++ b/spec/fixtures/sources/commonjs-parametrized-fields/index.js @@ -0,0 +1,30 @@ +const functions = require("../../../../src/v1/index"); +const functionsv2 = require("../../../../src/v2/index"); +const params = require("../../../../src/params"); +params.clearParams(); + +const stringParam = params.defineString("STRING_PARAM"); +const intParam = params.defineInt("INT_PARAM"); +const boolParam = params.defineBoolean("BOOLEAN_PARAM"); + +exports.v1http = functions.runWith({ + minInstances: intParam, + maxInstances: intParam, + memory: intParam, + timeoutSeconds: intParam, + serviceAccount: 
stringParam, + omit: boolParam +}).https.onRequest((req, resp) => { + resp.status(200).send("Hello world!"); +}); + +exports.v2http = functionsv2.https.onRequest({ + minInstances: intParam, + maxInstances: intParam, + memory: intParam, + timeoutSeconds: intParam, + serviceAccount: stringParam, + omit: boolParam +}, (req, resp) => { + resp.status(200).send("Hello world!"); +}); diff --git a/spec/fixtures/sources/commonjs-parametrized-fields/package.json b/spec/fixtures/sources/commonjs-parametrized-fields/package.json new file mode 100644 index 000000000..5d167975d --- /dev/null +++ b/spec/fixtures/sources/commonjs-parametrized-fields/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs-parametrized-fields" +} diff --git a/spec/fixtures/sources/commonjs-params/index.js b/spec/fixtures/sources/commonjs-params/index.js new file mode 100644 index 000000000..76d07559c --- /dev/null +++ b/spec/fixtures/sources/commonjs-params/index.js @@ -0,0 +1,67 @@ +const functions = require("../../../../src/v1/index"); +const functionsv2 = require("../../../../src/v2/index"); +const firestoreTranslateText = require("../../extsdk/translate").firestoreTranslateText; +const backfill = require("../../extsdk/local").backfill; +const params = require("../../../../src/params"); + +params.defineString("BORING"); +const foo = params.defineString("FOO", { input: { text: { validationRegex: "w+" } } }); +const bar = params.defineString("BAR", { default: foo, label: "asdf" }); +params.defineString("BAZ", { input: { select: { options: [{ value: "a" }, { value: "b" }] } } }); + +params.defineInt("AN_INT", { default: bar.equals("qux").thenElse(0, 1) }); +params.defineInt("ANOTHER_INT", { + input: { + select: { + options: [ + { label: "a", value: -2 }, + { label: "b", value: 2 }, + ], + }, + }, +}); + +params.defineList("LIST_PARAM", {input: { multiSelect: { options: [{ value: "c" }, { value: "d" }, { value: "e" }]}}}) + +params.defineSecret("SUPER_SECRET_FLAG"); + +// N.B: invocation of the precanned internal params should not affect the manifest + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send(params.projectID); +}); + +exports.v1callable = functions.https.onCall(() => { + return params.databaseURL; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send(params.gcloudProject); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return params.databaseURL; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); diff --git a/spec/fixtures/sources/commonjs-params/package.json b/spec/fixtures/sources/commonjs-params/package.json new file mode 100644 index 000000000..91f8c11da --- /dev/null +++ b/spec/fixtures/sources/commonjs-params/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs-params" +} diff --git a/spec/fixtures/sources/commonjs/index.js b/spec/fixtures/sources/commonjs/index.js new file mode 100644 index 
000000000..b33e0cb71 --- /dev/null +++ b/spec/fixtures/sources/commonjs/index.js @@ -0,0 +1,43 @@ +const functions = require("../../../../src/v1"); +const functionsv2 = require("../../../../src/v2"); +const firestoreTranslateText = require("../../extsdk/translate").firestoreTranslateText; +const backfill = require("../../extsdk/local").backfill; + +exports.v1http = functions.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v1callable = functions.https.onCall(() => { + return "PASS"; +}); + +exports.v2http = functionsv2.https.onRequest((req, resp) => { + resp.status(200).send("PASS"); +}); + +exports.v2callable = functionsv2.https.onCall(() => { + return "PASS"; +}); + +// A Firebase extension by ref +const extRef1 = firestoreTranslateText("extRef1", { + "COLLECTION_PATH": "collection1", + "INPUT_FIELD_NAME": "input1", + "LANGUAGES": "de,es", + "OUTPUT_FIELD_NAME": "translated", + "_EVENT_ARC_REGION": "us-central1", + "_FUNCTION_LOCATION": "us-central1", +}); +exports.extRef1 = extRef1; + +// A Firebase function defined by extension event +const ttOnStart = extRef1.onStart((event) => { + console.log("onStart got event: " + JSON.stringify(event, null, 2)); +}); +exports.ttOnStart = ttOnStart; + +// A Firebase extension by localPath +exports.extLocal2 = backfill("extLocal2", { + DO_BACKFILL: "False", + LOCATION: "us-central1", +}); \ No newline at end of file diff --git a/spec/fixtures/sources/commonjs/package.json b/spec/fixtures/sources/commonjs/package.json new file mode 100644 index 000000000..30e1b1b27 --- /dev/null +++ b/spec/fixtures/sources/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "name": "commonjs" +} diff --git a/spec/helper.ts b/spec/helper.ts new file mode 100644 index 000000000..c3f0f38ff --- /dev/null +++ b/spec/helper.ts @@ -0,0 +1,161 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import * as express from "express"; + +import * as https from "../src/common/providers/https"; +import * as tasks from "../src/common/providers/tasks"; + +/** + * RunHandlerResult contains the data from an express.Response. + */ +export interface RunHandlerResult { + status: number; + headers: { [name: string]: string }; + body: any; +} + +/** + * Runs an express handler with a given request asynchronously and returns the + * data populated into the response. 
+ */ +export function runHandler( + handler: ( + req: https.Request, + res: express.Response, + next?: express.NextFunction + ) => void | Promise<void>, + request: https.Request +): Promise<RunHandlerResult> { + return new Promise((resolve) => { + // MockResponse mocks an express.Response. + // This class lives here so it can reference resolve and reject. + class MockResponse { + private sentBody: string | undefined; + private statusCode = 0; + private headers: { [name: string]: string } = {}; + private callback: () => void; + private writeCalled = false; + + constructor() { + request.on("close", () => this.end()); + } + + public status(code: number) { + this.statusCode = code; + return this; + } + + // Headers are only set by the cors handler. + public setHeader(name: string, value: string) { + this.headers[name] = value; + } + + public getHeader(name: string): string { + return this.headers[name]; + } + + public send(sendBody: any) { + if (this.writeCalled) { + throw Error("Cannot set headers after they are sent to the client"); + } + + const toSend = typeof sendBody === "object" ? JSON.stringify(sendBody) : sendBody; + const body = + typeof this.sentBody === "undefined" ? toSend : this.sentBody + String(toSend || ""); + this.end(body); + } + + public write(writeBody: any, cb?: () => void) { + this.writeCalled = true; + + if (typeof this.sentBody === "undefined") { + this.sentBody = writeBody; + } else { + this.sentBody += typeof writeBody === "object" ? JSON.stringify(writeBody) : writeBody; + } + if (cb) { + setImmediate(cb); + } + return true; + } + + public end(body?: unknown) { + if (body) { + this.write(body); + } + resolve({ + status: this.statusCode, + headers: this.headers, + body: this.sentBody, + }); + + if (this.callback) { + this.callback(); + } + } + + public on(event: string, callback: () => void) { + if (event !== "finish" && event !== "close") { + throw new Error("MockResponse only implements close and finish event"); + } + this.callback = callback; + } + } + const response = new MockResponse(); + return void handler(request, response as any, () => undefined); + }); +} + +export function checkAuthContext( + context: https.CallableContext | https.CallableRequest | tasks.TaskContext, + projectId: string, + userId: string +) { + expect(context.auth).to.not.be.undefined; + expect(context.auth).to.not.be.null; + expect(context.auth.uid).to.equal(userId); + expect(context.auth.token.uid).to.equal(userId); + expect(context.auth.token.sub).to.equal(userId); + expect(context.auth.token.aud).to.equal(projectId); + + // TaskContext & TaskRequest don't have instanceIdToken + if ({}.hasOwnProperty.call(context, "instanceIdToken")) { + expect((context as https.CallableContext).instanceIdToken).to.be.undefined; + } +} + +export function checkAppCheckContext( + context: https.CallableContext | https.CallableRequest, + projectId: string, + appId: string +) { + expect(context.app).to.not.be.undefined; + expect(context.app).to.not.be.null; + expect(context.app.appId).to.equal(appId); + expect(context.app.token.app_id).to.be.equal(appId); + expect(context.app.token.sub).to.be.equal(appId); + expect(context.app.token.aud).to.be.deep.equal([`projects/${projectId}`]); + expect(context.auth).to.be.undefined; + expect(context.instanceIdToken).to.be.undefined; +} diff --git a/spec/logger.spec.ts b/spec/logger.spec.ts new file mode 100644 index 000000000..a42a57ee3 --- /dev/null +++ b/spec/logger.spec.ts @@ -0,0 +1,221 @@ +import { expect } from "chai"; + +import * as logger from "../src/logger"; + +describe("logger", () 
=> { + const stdoutWrite = process.stdout.write.bind(process.stdout); + const stderrWrite = process.stderr.write.bind(process.stderr); + let lastOut: string; + let lastErr: string; + + beforeEach(() => { + process.stdout.write = (msg: Buffer | string, cb?: any): boolean => { + lastOut = msg as string; + return stdoutWrite(msg, cb); + }; + process.stderr.write = (msg: Buffer | string, cb?: any): boolean => { + lastErr = msg as string; + return stderrWrite(msg, cb); + }; + }); + + afterEach(() => { + process.stdout.write = stdoutWrite; + process.stderr.write = stderrWrite; + }); + + function expectOutput(last: string, entry: any) { + return expect(JSON.parse(last.trim())).to.deep.eq(entry); + } + + function expectStdout(entry: any) { + return expectOutput(lastOut, entry); + } + + function expectStderr(entry: any) { + return expectOutput(lastErr, entry); + } + + describe("logging methods", () => { + it("should coalesce arguments into the message", () => { + logger.log("hello", { middle: "obj" }, "end message"); + expectStdout({ + severity: "INFO", + message: "hello { middle: 'obj' } end message", + }); + }); + + it("should merge structured data from the last argument", () => { + logger.log("hello", "world", { additional: "context" }); + expectStdout({ + severity: "INFO", + message: "hello world", + additional: "context", + }); + }); + + it("should not recognize null as a structured logging object", () => { + logger.log("hello", "world", null); + expectStdout({ + severity: "INFO", + message: "hello world null", + }); + }); + + it("should overwrite a 'message' field in structured object if a message is provided", () => { + logger.log("this instead", { test: true, message: "not this" }); + expectStdout({ + severity: "INFO", + message: "this instead", + test: true, + }); + }); + + it("should not overwrite a 'message' field in structured object if no other args are provided", () => { + logger.log({ test: true, message: "this" }); + expectStdout({ + severity: "INFO", + message: "this", + test: true, + }); + }); + }); + + describe("write", () => { + describe("structured logging", () => { + describe("write", () => { + it("should remove circular references", () => { + const circ: any = { b: "foo" }; + circ.circ = circ; + + const entry: logger.LogEntry = { + severity: "ERROR", + message: "testing circular", + circ, + }; + logger.write(entry); + expectStderr({ + severity: "ERROR", + message: "testing circular", + circ: { b: "foo", circ: "[Circular]" }, + }); + }); + + it("should remove circular references in arrays", () => { + const circ: any = { b: "foo" }; + circ.circ = [circ]; + + const entry: logger.LogEntry = { + severity: "ERROR", + message: "testing circular", + circ, + }; + logger.write(entry); + expectStderr({ + severity: "ERROR", + message: "testing circular", + circ: { b: "foo", circ: ["[Circular]"] }, + }); + }); + + it("should not detect duplicate object as circular", () => { + const obj: any = { a: "foo" }; + const entry: logger.LogEntry = { + severity: "ERROR", + message: "testing circular", + a: obj, + b: obj, + }; + logger.write(entry); + expectStderr({ + severity: "ERROR", + message: "testing circular", + a: { a: "foo" }, + b: { a: "foo" }, + }); + }); + + it("should not detect duplicate object in array as circular", () => { + const obj: any = { a: "foo" }; + const arr: any = [ + { a: obj, b: obj }, + { a: obj, b: obj }, + ]; + const entry: logger.LogEntry = { + severity: "ERROR", + message: "testing circular", + a: arr, + b: arr, + }; + logger.write(entry); + expectStderr({ + 
severity: "ERROR", + message: "testing circular", + a: [ + { a: { a: "foo" }, b: { a: "foo" } }, + { a: { a: "foo" }, b: { a: "foo" } }, + ], + b: [ + { a: { a: "foo" }, b: { a: "foo" } }, + { a: { a: "foo" }, b: { a: "foo" } }, + ], + }); + }); + + it("should not break on objects that override toJSON", () => { + const obj: any = { a: new Date("August 26, 1994 12:24:00Z") }; + + const entry: logger.LogEntry = { + severity: "ERROR", + message: "testing toJSON", + obj, + }; + logger.write(entry); + expectStderr({ + severity: "ERROR", + message: "testing toJSON", + obj: { a: "1994-08-26T12:24:00.000Z" }, + }); + }); + + it("should not alter parameters that are logged", () => { + const circ: any = { b: "foo" }; + circ.array = [circ]; + circ.object = circ; + const entry: logger.LogEntry = { + severity: "ERROR", + message: "testing circular", + circ, + }; + logger.write(entry); + + expect(circ.array[0].b).to.equal("foo"); + expect(circ.object.b).to.equal("foo"); + expect(circ.object.array[0].object.array[0].b).to.equal("foo"); + }); + + for (const severity of ["DEBUG", "INFO", "NOTICE"]) { + it(`should output ${severity} severity to stdout`, () => { + const entry: logger.LogEntry = { + severity: severity as logger.LogSeverity, + message: "test", + }; + logger.write(entry); + expectStdout(entry); + }); + } + + for (const severity of ["WARNING", "ERROR", "CRITICAL", "ALERT", "EMERGENCY"]) { + it(`should output ${severity} severity to stderr`, () => { + const entry: logger.LogEntry = { + severity: severity as logger.LogSeverity, + message: "test", + }; + logger.write(entry); + expectStderr(entry); + }); + } + }); + }); + }); +}); diff --git a/spec/params/params.spec.ts b/spec/params/params.spec.ts new file mode 100644 index 000000000..1a37c40cb --- /dev/null +++ b/spec/params/params.spec.ts @@ -0,0 +1,439 @@ +import { expect } from "chai"; +import * as params from "../../src/params"; + +describe("Params spec extraction", () => { + it("converts Expressions in the param default to strings", () => { + const bar = params.defineInt("BAR"); + expect( + params.defineString("FOO", { default: bar.notEquals(22).thenElse("asdf", "jkl;") }).toSpec() + .default + ).to.equal(`{{ params.BAR != 22 ? 
"asdf" : "jkl;" }}`); + }); + + it("converts RegExps in string validation parameters to strings", () => { + const foo = params.defineString("FOO", { input: { text: { validationRegex: /\d{5}/ } } }); + expect(foo.toSpec().input).to.deep.equal({ text: { validationRegex: "\\d{5}" } }); + }); +}); + +describe("Params value extraction", () => { + beforeEach(() => { + process.env.A_STRING = "asdf"; + process.env.SAME_STRING = "asdf"; + process.env.DIFF_STRING = "jkl;"; + process.env.AN_INT = "-11"; + process.env.SAME_INT = "-11"; + process.env.DIFF_INT = "22"; + process.env.PI = "3.14159"; + process.env.TRUE = "true"; + process.env.FALSE = "false"; + process.env.LIST = JSON.stringify(["a", "b", "c"]); + process.env.BAD_LIST = JSON.stringify(["a", 22, "c"]); + process.env.ESCAPED_LIST = JSON.stringify(["f\to\no"]); + process.env.A_SECRET_STRING = "123456supersecret"; + process.env.STRIPE_CONFIG = JSON.stringify({ + apiKey: "sk_test_123", + webhookSecret: "whsec_456", + clientId: "ca_789", + }); + process.env.INVALID_JSON_SECRET = "not valid json{"; + }); + + afterEach(() => { + params.clearParams(); + delete process.env.A_STRING; + delete process.env.SAME_STRING; + delete process.env.DIFF_STRING; + delete process.env.AN_INT; + delete process.env.SAME_INT; + delete process.env.DIFF_INT; + delete process.env.TRUE; + delete process.env.PI; + delete process.env.TRUE; + delete process.env.FALSE; + delete process.env.LIST; + delete process.env.BAD_LIST; + delete process.env.ESCAPED_LIST; + delete process.env.A_SECRET_STRING; + delete process.env.STRIPE_CONFIG; + delete process.env.INVALID_JSON_SECRET; + }); + + it("extracts identity params from the environment", () => { + const strParam = params.defineString("A_STRING"); + expect(strParam.value()).to.equal("asdf"); + + const intParam = params.defineInt("AN_INT"); + expect(intParam.value()).to.equal(-11); + + const boolParam = params.defineBoolean("TRUE"); + expect(boolParam.value()).to.be.true; + + const floatParam = params.defineFloat("PI"); + expect(floatParam.value()).to.equal(3.14159); + + const falseParam = params.defineBoolean("FALSE"); + expect(falseParam.value()).to.be.false; + + const listParam = params.defineList("LIST"); + expect(listParam.value()).to.deep.equal(["a", "b", "c"]); + + const listParamWithEscapes = params.defineList("ESCAPED_LIST"); + expect(listParamWithEscapes.value()).to.deep.equal(["f\to\no"]); + const secretParam = params.defineSecret("A_SECRET_STRING"); + expect(secretParam.value()).to.equal("123456supersecret"); + + const jsonSecretParam = params.defineJsonSecret("STRIPE_CONFIG"); + const secretValue = jsonSecretParam.value(); + expect(secretValue).to.deep.equal({ + apiKey: "sk_test_123", + webhookSecret: "whsec_456", + clientId: "ca_789", + }); + }); + + it("extracts the special case internal params from env.FIREBASE_CONFIG", () => { + process.env.FIREBASE_CONFIG = JSON.stringify({ + projectId: "foo", + storageBucket: "foo.appspot.com", + databaseURL: "https://foo.firebaseio.com", + }); + expect(params.databaseURL.value()).to.equal("https://foo.firebaseio.com"); + expect(params.gcloudProject.value()).to.equal("foo"); + expect(params.projectID.value()).to.equal("foo"); + expect(params.storageBucket.value()).to.equal("foo.appspot.com"); + + process.env.FIREBASE_CONFIG = JSON.stringify({ projectId: "foo" }); + expect(params.databaseURL.value()).to.equal(""); + expect(params.gcloudProject.value()).to.equal("foo"); + expect(params.projectID.value()).to.equal("foo"); + expect(params.storageBucket.value()).to.equal(""); + + 
process.env.FIREBASE_CONFIG = JSON.stringify({}); + expect(params.databaseURL.value()).to.equal(""); + expect(params.gcloudProject.value()).to.equal(""); + expect(params.projectID.value()).to.equal(""); + expect(params.storageBucket.value()).to.equal(""); + + delete process.env.FIREBASE_CONFIG; + }); + + it("falls back on the javascript zero values in case of type mismatch", () => { + const stringToInt = params.defineInt("A_STRING"); + expect(stringToInt.value()).to.equal(0); + + const stringToBool = params.defineBoolean("A_STRING"); + expect(stringToBool.value()).to.equal(false); + + const listToInt = params.defineInt("LIST"); + expect(listToInt.value()).to.equal(0); + }); + + it("falls back on the javascript zero values in case a list param's is unparsable as string[]", () => { + const notAllStrings = params.defineList("BAD_LIST"); + expect(notAllStrings.value()).to.deep.equal([]); + + const intToList = params.defineList("AN_INT"); + expect(intToList.value()).to.deep.equal([]); + }); + + it("returns a boolean value for Comparison expressions", () => { + const str = params.defineString("A_STRING"); + const sameStr = params.defineString("SAME_STRING"); + const diffStr = params.defineString("DIFF_STRING"); + expect(str.equals(sameStr).value()).to.be.true; + expect(str.equals("asdf").value()).to.be.true; + expect(str.equals(diffStr).value()).to.be.false; + expect(str.equals("jkl;").value()).to.be.false; + expect(str.notEquals(diffStr).value()).to.be.true; + expect(str.notEquals("jkl;").value()).to.be.true; + expect(str.lessThan(diffStr).value()).to.be.true; + expect(str.lessThan("jkl;").value()).to.be.true; + expect(str.lessThanorEqualTo(diffStr).value()).to.be.true; + expect(str.lessThanorEqualTo("jkl;").value()).to.be.true; + expect(str.greaterThan(diffStr).value()).to.be.false; + expect(str.greaterThan("jkl;").value()).to.be.false; + expect(str.greaterThanOrEqualTo(diffStr).value()).to.be.false; + expect(str.greaterThanOrEqualTo("jkl;").value()).to.be.false; + + const int = params.defineInt("AN_INT"); + const sameInt = params.defineInt("SAME_INT"); + const diffInt = params.defineInt("DIFF_INT"); + expect(int.equals(sameInt).value()).to.be.true; + expect(int.equals(-11).value()).to.be.true; + expect(int.equals(diffInt).value()).to.be.false; + expect(int.equals(22).value()).to.be.false; + expect(int.notEquals(diffInt).value()).to.be.true; + expect(int.notEquals(22).value()).to.be.true; + expect(int.greaterThan(diffInt).value()).to.be.false; + expect(int.greaterThan(22).value()).to.be.false; + expect(int.greaterThanOrEqualTo(diffInt).value()).to.be.false; + expect(int.greaterThanOrEqualTo(22).value()).to.be.false; + expect(int.lessThan(diffInt).value()).to.be.true; + expect(int.lessThan(22).value()).to.be.true; + expect(int.lessThanorEqualTo(diffInt).value()).to.be.true; + expect(int.lessThanorEqualTo(22).value()).to.be.true; + }); + + it("can use all the comparison operators when explicitly requested", () => { + const jkl = params.defineString("DIFF_STRING"); + expect(jkl.cmp(">", "asdf").value()).to.be.true; + expect(jkl.cmp(">", "jkl;").value()).to.be.false; + expect(jkl.cmp(">", "qwerty").value()).to.be.false; + expect(jkl.cmp(">=", "asdf").value()).to.be.true; + expect(jkl.cmp(">=", "jkl;").value()).to.be.true; + expect(jkl.cmp(">=", "qwerty").value()).to.be.false; + expect(jkl.cmp("<", "asdf").value()).to.be.false; + expect(jkl.cmp("<", "jkl;").value()).to.be.false; + expect(jkl.cmp("<", "qwerty").value()).to.be.true; + expect(jkl.cmp("<=", "asdf").value()).to.be.false; + 
expect(jkl.cmp("<=", "jkl;").value()).to.be.true; + expect(jkl.cmp("<=", "qwerty").value()).to.be.true; + + const twentytwo = params.defineInt("DIFF_INT"); + expect(twentytwo.cmp(">", 11).value()).to.be.true; + expect(twentytwo.cmp(">", 22).value()).to.be.false; + expect(twentytwo.cmp(">", 33).value()).to.be.false; + expect(twentytwo.cmp(">=", 11).value()).to.be.true; + expect(twentytwo.cmp(">=", 22).value()).to.be.true; + expect(twentytwo.cmp(">=", 33).value()).to.be.false; + expect(twentytwo.cmp("<", 11).value()).to.be.false; + expect(twentytwo.cmp("<", 22).value()).to.be.false; + expect(twentytwo.cmp("<", 33).value()).to.be.true; + expect(twentytwo.cmp("<=", 11).value()).to.be.false; + expect(twentytwo.cmp("<=", 22).value()).to.be.true; + expect(twentytwo.cmp("<=", 33).value()).to.be.true; + + const trueParam = params.defineBoolean("TRUE"); + expect(trueParam.cmp(">", true).value()).to.be.false; + expect(trueParam.cmp(">", false).value()).to.be.true; + expect(trueParam.cmp(">=", true).value()).to.be.true; + expect(trueParam.cmp(">=", false).value()).to.be.true; + expect(trueParam.cmp("<", true).value()).to.be.false; + expect(trueParam.cmp("<", false).value()).to.be.false; + expect(trueParam.cmp("<=", true).value()).to.be.true; + expect(trueParam.cmp("<=", false).value()).to.be.false; + }); + + it("can test list params for equality but not < or >", () => { + const p1 = params.defineList("LIST"); + const p2 = params.defineList("ESCAPED_LIST"); + + expect(p1.equals(p1).value()).to.be.true; + expect(p1.notEquals(p1).value()).to.be.false; + expect(p1.equals(p2).value()).to.be.false; + expect(p1.notEquals(p2).value()).to.be.true; + + expect(() => p1.greaterThan(p1).value()).to.throw; + }); + + it("can select the output of a ternary expression based on the comparison", () => { + const trueExpr = params.defineString("A_STRING").equals(params.defineString("SAME_STRING")); + expect(trueExpr.thenElse(1, 0).value()).to.equal(1); + const falseExpr = params.defineInt("AN_INT").equals(params.defineInt("DIFF_INT")); + expect(falseExpr.thenElse(1, 0).value()).to.equal(0); + + const twentytwo = params.defineInt("DIFF_INT"); + expect(trueExpr.thenElse(twentytwo, 0).value()).to.equal(22); + expect(falseExpr.thenElse(1, twentytwo).value()).to.equal(22); + }); +}); + +describe("defineJsonSecret", () => { + beforeEach(() => { + process.env.VALID_JSON = JSON.stringify({ key: "value", nested: { foo: "bar" } }); + process.env.INVALID_JSON = "not valid json{"; + process.env.EMPTY_OBJECT = JSON.stringify({}); + process.env.ARRAY_JSON = JSON.stringify([1, 2, 3]); + }); + + afterEach(() => { + params.clearParams(); + delete process.env.VALID_JSON; + delete process.env.INVALID_JSON; + delete process.env.EMPTY_OBJECT; + delete process.env.ARRAY_JSON; + delete process.env.FUNCTIONS_CONTROL_API; + }); + + it("parses valid JSON secrets correctly", () => { + const jsonSecret = params.defineJsonSecret("VALID_JSON"); + const value = jsonSecret.value(); + expect(value).to.deep.equal({ key: "value", nested: { foo: "bar" } }); + }); + + it("throws an error when JSON is invalid", () => { + const jsonSecret = params.defineJsonSecret("INVALID_JSON"); + expect(() => jsonSecret.value()).to.throw( + '"INVALID_JSON" could not be parsed as JSON. Please verify its value in Secret Manager.' + ); + }); + + it("throws an error when secret is not found", () => { + const jsonSecret = params.defineJsonSecret("NON_EXISTENT"); + expect(() => jsonSecret.value()).to.throw( + 'No value found for secret parameter "NON_EXISTENT". 
A function can only access a secret if you include the secret in the function\'s dependency array.' + ); + }); + + it("handles empty object JSON", () => { + const jsonSecret = params.defineJsonSecret("EMPTY_OBJECT"); + const value = jsonSecret.value(); + expect(value).to.deep.equal({}); + }); + + it("handles array JSON", () => { + const jsonSecret = params.defineJsonSecret("ARRAY_JSON"); + const value = jsonSecret.value(); + expect(value).to.deep.equal([1, 2, 3]); + }); + + it("throws an error when accessed during deployment", () => { + process.env.FUNCTIONS_CONTROL_API = "true"; + const jsonSecret = params.defineJsonSecret("VALID_JSON"); + expect(() => jsonSecret.value()).to.throw( + 'Cannot access the value of secret "VALID_JSON" during function deployment. Secret values are only available at runtime.' + ); + }); + + it("supports destructuring of JSON objects", () => { + process.env.STRIPE_CONFIG = JSON.stringify({ + apiKey: "sk_test_123", + webhookSecret: "whsec_456", + clientId: "ca_789", + }); + + const stripeConfig = params.defineJsonSecret("STRIPE_CONFIG"); + const { apiKey, webhookSecret, clientId } = stripeConfig.value(); + + expect(apiKey).to.equal("sk_test_123"); + expect(webhookSecret).to.equal("whsec_456"); + expect(clientId).to.equal("ca_789"); + + delete process.env.STRIPE_CONFIG; + }); + + it("registers the param in declaredParams", () => { + const initialLength = params.declaredParams.length; + const jsonSecret = params.defineJsonSecret("TEST_SECRET"); + expect(params.declaredParams.length).to.equal(initialLength + 1); + expect(params.declaredParams[params.declaredParams.length - 1]).to.equal(jsonSecret); + }); + + it("has correct type and format annotation in toSpec", () => { + const jsonSecret = params.defineJsonSecret("TEST_SECRET"); + const spec = jsonSecret.toSpec(); + expect(spec.type).to.equal("secret"); + expect(spec.name).to.equal("TEST_SECRET"); + expect(spec.format).to.equal("json"); + }); +}); + +describe("Params as CEL", () => { + it("internal expressions behave like strings", () => { + const str = params.defineString("A_STRING"); + + expect(params.projectID.toCEL()).to.equal(`{{ params.PROJECT_ID }}`); + expect(params.projectID.equals("foo").toCEL()).to.equal(`{{ params.PROJECT_ID == "foo" }}`); + expect(params.projectID.equals(str).toCEL()).to.equal( + `{{ params.PROJECT_ID == params.A_STRING }}` + ); + expect(params.gcloudProject.toCEL()).to.equal(`{{ params.GCLOUD_PROJECT }}`); + expect(params.gcloudProject.equals("foo").toCEL()).to.equal( + `{{ params.GCLOUD_PROJECT == "foo" }}` + ); + expect(params.gcloudProject.equals(str).toCEL()).to.equal( + `{{ params.GCLOUD_PROJECT == params.A_STRING }}` + ); + expect(params.databaseURL.toCEL()).to.equal(`{{ params.DATABASE_URL }}`); + expect(params.databaseURL.equals("foo").toCEL()).to.equal(`{{ params.DATABASE_URL == "foo" }}`); + expect(params.databaseURL.equals(str).toCEL()).to.equal( + `{{ params.DATABASE_URL == params.A_STRING }}` + ); + expect(params.storageBucket.toCEL()).to.equal(`{{ params.STORAGE_BUCKET }}`); + expect(params.storageBucket.equals("foo").toCEL()).to.equal( + `{{ params.STORAGE_BUCKET == "foo" }}` + ); + expect(params.storageBucket.equals(str).toCEL()).to.equal( + `{{ params.STORAGE_BUCKET == params.A_STRING }}` + ); + }); + + it("identity expressions", () => { + expect(params.defineString("FOO").toCEL()).to.equal("{{ params.FOO }}"); + expect(params.defineInt("FOO").toCEL()).to.equal("{{ params.FOO }}"); + expect(params.defineBoolean("FOO").toCEL()).to.equal("{{ params.FOO }}"); + }); + + 
it("comparison expressions", () => { + expect(params.defineString("FOO").equals(params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO == params.BAR }}" + ); + expect(params.defineString("FOO").cmp("==", params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO == params.BAR }}" + ); + expect(params.defineString("FOO").cmp("!=", params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO != params.BAR }}" + ); + expect(params.defineString("FOO").cmp(">", params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO > params.BAR }}" + ); + expect(params.defineString("FOO").cmp(">=", params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO >= params.BAR }}" + ); + expect(params.defineString("FOO").cmp("<", params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO < params.BAR }}" + ); + expect(params.defineString("FOO").cmp("<=", params.defineString("BAR")).toCEL()).to.equal( + "{{ params.FOO <= params.BAR }}" + ); + + expect(params.defineString("FOO").equals("BAR").toCEL()).to.equal('{{ params.FOO == "BAR" }}'); + expect(params.defineString("FOO").cmp("==", "BAR").toCEL()).to.equal( + '{{ params.FOO == "BAR" }}' + ); + expect(params.defineString("FOO").cmp("!=", "BAR").toCEL()).to.equal( + '{{ params.FOO != "BAR" }}' + ); + expect(params.defineString("FOO").cmp(">", "BAR").toCEL()).to.equal('{{ params.FOO > "BAR" }}'); + expect(params.defineString("FOO").cmp(">=", "BAR").toCEL()).to.equal( + '{{ params.FOO >= "BAR" }}' + ); + expect(params.defineString("FOO").cmp("<", "BAR").toCEL()).to.equal('{{ params.FOO < "BAR" }}'); + expect(params.defineString("FOO").cmp("<=", "BAR").toCEL()).to.equal( + '{{ params.FOO <= "BAR" }}' + ); + + expect(params.defineInt("FOO").equals(-11).toCEL()).to.equal("{{ params.FOO == -11 }}"); + expect(params.defineInt("FOO").cmp("==", -11).toCEL()).to.equal("{{ params.FOO == -11 }}"); + expect(params.defineInt("FOO").cmp("!=", -11).toCEL()).to.equal("{{ params.FOO != -11 }}"); + expect(params.defineInt("FOO").cmp(">", -11).toCEL()).to.equal("{{ params.FOO > -11 }}"); + expect(params.defineInt("FOO").cmp(">=", -11).toCEL()).to.equal("{{ params.FOO >= -11 }}"); + expect(params.defineInt("FOO").cmp("<", -11).toCEL()).to.equal("{{ params.FOO < -11 }}"); + expect(params.defineInt("FOO").cmp("<=", -11).toCEL()).to.equal("{{ params.FOO <= -11 }}"); + }); + + it("ternary expressions", () => { + const booleanExpr = params.defineBoolean("BOOL"); + const cmpExpr = params.defineInt("A").cmp("!=", params.defineInt("B")); + + expect(booleanExpr.thenElse("asdf", "jkl;").toCEL()).to.equal( + '{{ params.BOOL ? "asdf" : "jkl;" }}' + ); + expect(booleanExpr.thenElse(-11, 22).toCEL()).to.equal("{{ params.BOOL ? -11 : 22 }}"); + expect(booleanExpr.thenElse(false, true).toCEL()).to.equal("{{ params.BOOL ? false : true }}"); + expect( + booleanExpr.thenElse(params.defineString("FOO"), params.defineString("BAR")).toCEL() + ).to.equal("{{ params.BOOL ? params.FOO : params.BAR }}"); + expect(cmpExpr.thenElse("asdf", "jkl;").toCEL()).to.equal( + '{{ params.A != params.B ? "asdf" : "jkl;" }}' + ); + expect(cmpExpr.thenElse(-11, 22).toCEL()).to.equal("{{ params.A != params.B ? -11 : 22 }}"); + expect(cmpExpr.thenElse(false, true).toCEL()).to.equal( + "{{ params.A != params.B ? false : true }}" + ); + expect( + cmpExpr.thenElse(params.defineString("FOO"), params.defineString("BAR")).toCEL() + ).to.equal("{{ params.A != params.B ? 
params.FOO : params.BAR }}"); + }); +}); diff --git a/spec/providers/analytics.spec.ts b/spec/providers/analytics.spec.ts deleted file mode 100644 index cef1d8328..000000000 --- a/spec/providers/analytics.spec.ts +++ /dev/null @@ -1,241 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the 'Software'), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import * as analytics from '../../src/providers/analytics'; -import { expect } from 'chai'; -import { LegacyEvent } from '../../src/cloud-functions'; -import * as analytics_spec_input from './analytics.spec.input'; - -describe('Analytics Functions', () => { - describe('EventBuilder', () => { - before(() => { - process.env.GCLOUD_PROJECT = 'project1'; - }); - - after(() => { - delete process.env.GCLOUD_PROJECT; - }); - - describe('#onLog', () => { - it('should return a TriggerDefinition with appropriate values', () => { - const cloudFunction = analytics.event('first_open').onLog(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'providers/google.firebase.analytics/eventTypes/event.log', - resource: 'projects/project1/events/first_open', - service: 'app-measurement.com', - }, - }); - }); - }); - - describe('#dataConstructor', () => { - it('should handle an event with the appropriate fields', () => { - const cloudFunction = analytics.event('first_open').onLog((data: analytics.AnalyticsEvent) => data); - - // The event data delivered over the wire will be the JSON for an AnalyticsEvent: - // https://firebase.google.com/docs/auth/admin/manage-users#retrieve_user_data - let event: LegacyEvent = { - eventId: 'f2e2f0bf-2e47-4d92-b009-e7a375ecbd3e', - eventType: 'providers/google.firebase.analytics/eventTypes/event.log', - resource: 'projects/myUnitTestProject/events/first_open', - data: { - userDim: { - userId: 'hi!', - }, - }, - }; - - return expect(cloudFunction(event)).to.eventually.deep.equal({ - params: {}, - user: { - userId: 'hi!', - userProperties: {}, - }, - }); - }); - - it('should remove xValues', () => { - const cloudFunction = analytics.event('first_open').onLog((data: analytics.AnalyticsEvent) => data); - - // Incoming events will have four kinds of "xValue" fields: "intValue", - // "stringValue", "doubleValue" and "floatValue". We expect those to get - // flattened away, leaving just their values. 
- let event: LegacyEvent = { - data: { - eventDim: - [ - { - date: '20170202', - name: 'Loaded_In_Background', - params: { - build: { - stringValue: '1350', - }, - calls_remaining: { - intValue: '10', - }, - goats_teleported: { - doubleValue: 1.1, - }, - boat_boyancy: { - floatValue: 133.7, - }, - }, - }, - ], - userDim: { - userProperties: { - foo: { - value: { - stringValue: 'bar', - }, - }, - }, - }, - }, - }; - - return expect(cloudFunction(event)).to.eventually.deep.equal({ - reportingDate: '20170202', - name: 'Loaded_In_Background', - params: { - build: '1350', - calls_remaining: 10, - goats_teleported: 1.1, - boat_boyancy: 133.7, - }, - user: { - userProperties: { - foo: { - value: 'bar', - }, - }, - }, - }); - }); - - it('should change microsecond timestamps to ISO strings, and offsets to millis', () => { - const cloudFunction = analytics.event('first_open').onLog((data: analytics.AnalyticsEvent) => data); - - let event: LegacyEvent = { - data: { - eventDim: - [ - { - date: '20170202', - name: 'Loaded_In_Background', - timestampMicros: '1489080600000000', - previousTimestampMicros: '526657020000000', - }, - ], - userDim: { - firstOpenTimestampMicros: '577978620000000', - userProperties: { - foo: { - setTimestampUsec: '514820220000000', - }, - }, - bundleInfo: { - serverTimestampOffsetMicros: 9876789, - }, - }, - }, - }; - - return expect(cloudFunction(event)).to.eventually.deep.equal({ - reportingDate: '20170202', - name: 'Loaded_In_Background', - params: {}, - logTime: '2017-03-09T17:30:00.000Z', - previousLogTime: '1986-09-09T13:37:00.000Z', - user: { - firstOpenTime: '1988-04-25T13:37:00.000Z', - userProperties: { - foo: { - setTime: '1986-04-25T13:37:00.000Z', - }, - }, - bundleInfo: { - serverTimestampOffset: 9877, - }, - }, - }); - }); - - it('should populate currency fields', () => { - const cloudFunction = analytics.event('first_open').onLog((data: analytics.AnalyticsEvent) => data); - - // Incoming events will have four kinds of "xValue" fields: "intValue", - // "stringValue", "doubleValue" and "floatValue". We expect those to get - // flattened away, leaving just their values. - // - // xValues in eventDim[...].params should also populate a 'rawValue' field - // that always contains a string. - // - // Separately, the input has a number of microsecond timestamps that we'd - // like to rename and scale down to milliseconds. - let event: LegacyEvent = { - data: { - eventDim: - [ - { - date: '20170202', - name: 'Loaded_In_Background', - valueInUsd: 123.4, - }, - ], - }, - }; - - return expect(cloudFunction(event)).to.eventually.deep.equal({ - reportingDate: '20170202', - name: 'Loaded_In_Background', - params: {}, - valueInUSD: 123.4, // Field renamed Usd -> USD. - }); - }); - - it('should recognize all the fields the payload can contain', () => { - const cloudFunction = analytics.event('first_open').onLog((data: analytics.AnalyticsEvent) => data); - // The payload in analytics_spec_input contains all possible fields at least once. 
- return expect(cloudFunction(analytics_spec_input.fullPayload)) - .to.eventually.deep.equal(analytics_spec_input.data); - }); - }); - }); - - describe('process.env.GCLOUD_PROJECT not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => analytics.event('event').onLog(() => null)).to.not.throw(Error); - }); - - it('should throw when trigger is accessed', () => { - expect(() => analytics.event('event').onLog(() => null).__trigger).to.throw(Error); - }); - - it('should not throw when #run is called', () => { - let cf = analytics.event('event').onLog(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); -}); diff --git a/spec/providers/auth.spec.ts b/spec/providers/auth.spec.ts deleted file mode 100644 index 989f4cf22..000000000 --- a/spec/providers/auth.spec.ts +++ /dev/null @@ -1,182 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as auth from '../../src/providers/auth'; -import { expect } from 'chai'; -import * as firebase from 'firebase-admin'; -import { CloudFunction } from '../../src'; - -describe('Auth Functions', () => { - describe('AuthBuilder', () => { - let handler: (user: firebase.auth.UserRecord) => PromiseLike | any; - - before(() => { - process.env.GCLOUD_PROJECT = 'project1'; - }); - - after(() => { - delete process.env.GCLOUD_PROJECT; - }); - - describe('#onCreate', () => { - it('should return a TriggerDefinition with appropriate values', () => { - const cloudFunction = auth.user().onCreate(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'providers/firebase.auth/eventTypes/user.create', - resource: 'projects/project1', - service: 'firebaseauth.googleapis.com', - }, - }); - }); - }); - - describe('#onDelete', () => { - it('should return a TriggerDefinition with appropriate values', () => { - const cloudFunction = auth.user().onDelete(handler); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'providers/firebase.auth/eventTypes/user.delete', - resource: 'projects/project1', - service: 'firebaseauth.googleapis.com', - }, - }); - }); - }); - - describe('#_dataConstructor', () => { - let cloudFunctionCreate: CloudFunction; - let cloudFunctionDelete: CloudFunction; - let event: any; - - before(() => { - cloudFunctionCreate = auth.user().onCreate((data: firebase.auth.UserRecord) => data); - cloudFunctionDelete = auth.user().onDelete((data: firebase.auth.UserRecord) => data); - event = { - data: { - metadata: { - createdAt: '2016-12-15T19:37:37.059Z', - lastSignedInAt: '2017-01-01T00:00:00.000Z', - }, - }, - }; - }); - - it('should transform wire format for UserRecord into v5.0.0 format', () => { - return Promise.all([ - cloudFunctionCreate(event).then((data: any) => { - expect(data.metadata.creationTime).to.equal('2016-12-15T19:37:37.059Z'); - expect(data.metadata.lastSignInTime).to.equal('2017-01-01T00:00:00.000Z'); - }), - cloudFunctionDelete(event).then((data: any) => { - expect(data.metadata.creationTime).to.equal('2016-12-15T19:37:37.059Z'); - expect(data.metadata.lastSignInTime).to.equal('2017-01-01T00:00:00.000Z'); - }), - ]); - }); - - it('should handle new wire format if/when there is a change', () => { - const newEvent = { - data: { - metadata: { - creationTime: '2016-12-15T19:37:37.059Z', - lastSignInTime: '2017-01-01T00:00:00.000Z', - }, - }, - }; - - return Promise.all([ - cloudFunctionCreate(newEvent).then((data: any) => { - expect(data.metadata.creationTime).to.equal('2016-12-15T19:37:37.059Z'); - expect(data.metadata.lastSignInTime).to.equal('2017-01-01T00:00:00.000Z'); - }), - cloudFunctionDelete(newEvent).then((data: any) => { - expect(data.metadata.creationTime).to.equal('2016-12-15T19:37:37.059Z'); - expect(data.metadata.lastSignInTime).to.equal('2017-01-01T00:00:00.000Z'); - }), - ]); - }); - }); - }); - - describe('userRecordConstructor', () => { - it('will provide falsey values for fields that are not in raw wire data', () => { - const record = auth.userRecordConstructor({ uid: '123'}); - expect(record.toJSON()).to.deep.equal({ - uid: '123', - email: null, - emailVerified: false, - displayName: null, - photoURL: null, - phoneNumber: null, - disabled: false, - providerData: [], - customClaims: {}, - passwordSalt: null, - passwordHash: null, - tokensValidAfterTime: null, - metadata: { - creationTime: null, - lastSignInTime: null, - }, - }); - }); - - it('will not interfere with fields that are in 
raw wire data', () => { - const raw: any = { - uid: '123', - email: 'email@gmail.com', - emailVerified: true, - displayName: 'User', - photoURL: 'url', - phoneNumber: '1233332222', - disabled: true, - providerData: [], - customClaims: {}, - passwordSalt: 'abc', - passwordHash: 'def', - tokensValidAfterTime: '2027-02-02T23:01:19.797Z', - metadata: { - creationTime: '2017-02-02T23:06:26.124Z', - lastSignInTime: '2017-02-02T23:01:19.797Z', - }, - }; - const record = auth.userRecordConstructor(raw); - expect(record.toJSON()).to.deep.equal(raw); - }); - }); - - describe('process.env.GCLOUD_PROJECT not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => auth.user().onCreate(() => null)).to.not.throw(Error); - }); - - it('should throw when trigger is accessed', () => { - expect(() => auth.user().onCreate(() => null).__trigger).to.throw(Error); - }); - - it('should not throw when #run is called', () => { - let cf = auth.user().onCreate(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); -}); diff --git a/spec/providers/crashlytics.spec.ts b/spec/providers/crashlytics.spec.ts deleted file mode 100644 index 2b678eefb..000000000 --- a/spec/providers/crashlytics.spec.ts +++ /dev/null @@ -1,93 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as crashlytics from '../../src/providers/crashlytics'; -import { apps as appsNamespace } from '../../src/apps'; -import { expect } from 'chai'; - -describe('Crashlytics Functions', () => { - describe('Issue Builder', () => { - before(() => { - appsNamespace.init(); - process.env.GCLOUD_PROJECT = 'project1'; - }); - - after(() => { - delete appsNamespace.singleton; - delete process.env.GCLOUD_PROJECT; - }); - - describe('#onNew', () => { - it('should return a TriggerDefinition with appropriate values', () => { - const cloudFunction = crashlytics.issue().onNew(data => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'providers/firebase.crashlytics/eventTypes/issue.new', - resource: 'projects/project1', - service: 'fabric.io', - }, - }); - }); - }); - - describe('#onRegressed', () => { - it('should return a TriggerDefinition with appropriate values', () => { - const cloudFunction = crashlytics.issue().onRegressed(data => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'providers/firebase.crashlytics/eventTypes/issue.regressed', - resource: 'projects/project1', - service: 'fabric.io', - }, - }); - }); - }); - - describe('#onVelocityAlert', () => { - it('should return a TriggerDefinition with appropriate values', () => { - const cloudFunction = crashlytics.issue().onVelocityAlert(data => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'providers/firebase.crashlytics/eventTypes/issue.velocityAlert', - resource: 'projects/project1', - service: 'fabric.io', - }, - }); - }); - }); - }); - - describe('process.env.GCLOUD_PROJECT not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => crashlytics.issue().onNew(() => null)).to.not.throw(Error); - }); - - it('should throw if __trigger is accessed', () => { - expect(() => crashlytics.issue().onNew(() => null).__trigger).to.throw(Error); - }); - - it('should not throw when #run is called', () => { - let cf = crashlytics.issue().onNew(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); -}); diff --git a/spec/providers/database.spec.ts b/spec/providers/database.spec.ts deleted file mode 100644 index d0cc20f9e..000000000 --- a/spec/providers/database.spec.ts +++ /dev/null @@ -1,420 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as database from '../../src/providers/database'; -import { expect as expect } from 'chai'; -import { apps as appsNamespace } from '../../src/apps'; -import { applyChange } from '../../src/utils'; - -describe('Database Functions', () => { - - describe('DatabaseBuilder', () => { - // TODO add tests for building a data or change based on the type of operation - - before(() => { - process.env.FIREBASE_CONFIG = JSON.stringify({ - databaseURL: 'https://subdomain.firebaseio.com', - }); - appsNamespace.init(); - }); - - after(() => { - delete process.env.FIREBASE_CONFIG; - delete appsNamespace.singleton; - }); - - describe('#onWrite()', () => { - it('should return "ref.write" as the event type', () => { - let eventType = database.ref('foo').onWrite(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq('providers/google.firebase.database/eventTypes/ref.write'); - }); - - it('should construct a proper resource path', () => { - let resource = database.ref('foo').onWrite(() => null).__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/subdomain/refs/foo'); - }); - - it('should let developers choose a database instance', () => { - let func = database.instance('custom').ref('foo').onWrite(() => null); - let resource = func.__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/custom/refs/foo'); - }); - - it('should return a handler that emits events with a proper DataSnapshot', () => { - let handler = database.ref('/users/{id}').onWrite(change => { - expect(change.after.val()).to.deep.equal({ foo: 'bar' }); - }); - - return handler({ - data: { - data: null, - delta: { foo: 'bar' }, - }, - resource: 'projects/_/instances/subdomains/refs/users', - eventType: 'providers/google.firebase.database/eventTypes/ref.write', - }); - }); - }); - - describe('#onCreate()', () => { - it('should return "ref.create" as the event type', () => { - let eventType = database.ref('foo').onCreate(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq('providers/google.firebase.database/eventTypes/ref.create'); - }); - - it('should construct a proper resource path', () => { - let resource = database.ref('foo').onCreate(() => null).__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/subdomain/refs/foo'); - }); - - it('should let developers choose a database instance', () => { - let func = database.instance('custom').ref('foo').onCreate(() => null); - let resource = func.__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/custom/refs/foo'); - }); - - it('should return a handler that emits events with a proper DataSnapshot', () => { - let handler = database.ref('/users/{id}').onCreate(data => { - expect(data.val()).to.deep.equal({ foo: 'bar' }); - }); - - return handler({ - data: { - data: null, - delta: { foo: 'bar' }, - }, - resource: 'projects/_/instances/subdomains/refs/users', - eventType: 'providers/google.firebase.database/eventTypes/ref.create', - }); - }); - }); - - describe('#onUpdate()', () => { - it('should return "ref.update" as the event type', () => { - let eventType = database.ref('foo').onUpdate(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq('providers/google.firebase.database/eventTypes/ref.update'); - }); - - it('should construct a proper resource path', () => { - let resource = database.ref('foo').onUpdate(() => null).__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/subdomain/refs/foo'); - }); - - 
it('should let developers choose a database instance', () => { - let func = database.instance('custom').ref('foo').onUpdate(() => null); - let resource = func.__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/custom/refs/foo'); - }); - - it('should return a handler that emits events with a proper DataSnapshot', () => { - let handler = database.ref('/users/{id}').onUpdate(change => { - expect(change.after.val()).to.deep.equal({ foo: 'bar' }); - }); - - return handler({ - data: { - data: null, - delta: { foo: 'bar' }, - }, - resource: 'projects/_/instances/subdomains/refs/users', - eventType: 'providers/google.firebase.database/eventTypes/ref.update', - }); - }); - }); - - describe('#onDelete()', () => { - it('should return "ref.delete" as the event type', () => { - let eventType = database.ref('foo').onDelete(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq('providers/google.firebase.database/eventTypes/ref.delete'); - }); - - it('should construct a proper resource path', () => { - let resource = database.ref('foo').onDelete(() => null).__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/subdomain/refs/foo'); - }); - - it('should let developers choose a database instance', () => { - let func = database.instance('custom').ref('foo').onDelete(() => null); - let resource = func.__trigger.eventTrigger.resource; - expect(resource).to.eq('projects/_/instances/custom/refs/foo'); - }); - - it('should return a handler that emits events with a proper DataSnapshot', () => { - let handler = database.ref('/users/{id}').onDelete(data => { - expect(data.val()).to.deep.equal({ foo: 'bar' }); - }); - - return handler({ - data: { - data: { foo: 'bar' }, - delta: null, - }, - resource: 'projects/_/instances/subdomains/refs/users', - eventType: 'providers/google.firebase.database/eventTypes/ref.delete', - }); - }); - }); - - }); - - describe('process.env.FIREBASE_CONFIG not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => database.ref('/path').onWrite(() => null)).to.not.throw(Error); - }); - - it('should throw when trigger is accessed', () => { - expect(() => database.ref('/path').onWrite(() => null).__trigger).to.throw(Error); - }); - - it('should not throw when #run is called', () => { - let cf = database.ref('/path').onWrite(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); - - describe('resourceToInstanceAndPath', () => { - it('should return the correct instance and path strings', () => { - let [instance, path] = database.resourceToInstanceAndPath('projects/_/instances/foo/refs/bar'); - expect(instance).to.equal('https://foo.firebaseio.com'); - expect(path).to.equal('/bar'); - }); - }); - - describe('DataSnapshot', () => { - let subject: any; - const apps = new appsNamespace.Apps(); - - let populate = (data: any) => { - let [instance, path] = database.resourceToInstanceAndPath('projects/_/instances/other-subdomain/refs/foo'); - subject = new database.DataSnapshot( - data, - path, - apps.admin, - instance - ); - }; - - describe('#ref: firebase.database.Reference', () => { - it('should return a ref for correct instance, not the default instance', () => { - populate({}); - expect(subject.ref.toJSON()).to.equal('https://other-subdomain.firebaseio.com/foo'); - }); - }); - - describe('#val(): any', () => { - it('should return child values based on the child path', () => { - populate(applyChange({ a: { b: 'c' } }, { a: { d: 'e' } })); - 
expect(subject.child('a').val()).to.deep.equal({ b: 'c', d: 'e' }); - }); - - it('should return null for children past a leaf', () => { - populate(applyChange({ a: 23 }, { b: 33 })); - expect(subject.child('a/b').val()).to.be.null; - expect(subject.child('b/c').val()).to.be.null; - }); - - it('should return a leaf value', () => { - populate(23); - expect(subject.val()).to.eq(23); - populate({ b: 23, a: null }); - expect(subject.child('b').val()).to.eq(23); - }); - - it('should coerce object into array if all keys are integers', () => { - populate({ 0: 'a', 1: 'b', 2: { c: 'd' } }); - expect(subject.val()).to.deep.equal(['a', 'b', { c: 'd' }]); - populate({ 0: 'a', 2: 'b', 3: { c: 'd' } }); - expect(subject.val()).to.deep.equal(['a', , 'b', { c: 'd' }]); - populate({ 'foo': { 0: 'a', 1: 'b' } }); - expect(subject.val()).to.deep.equal({ foo: ['a', 'b'] }); - }); - - // Regression test: zero-values (including children) were accidentally forwarded as 'null'. - it('should deal with zero-values appropriately', () => { - populate(0); - expect(subject.val()).to.equal(0); - populate({ myKey: 0 }); - expect(subject.val()).to.deep.equal({ myKey: 0 }); - - // Null values are still reported as null. - populate({ myKey: null }); - expect(subject.val()).to.deep.equal({ myKey: null }); - }); - - // Regression test: .val() was returning array of nulls when there's a property called length (BUG#37683995) - it('should return correct values when data has "length" property', () => { - populate({ length: 3, foo: 'bar' }); - expect(subject.val()).to.deep.equal({ length: 3, foo: 'bar'}); - }); - }); - - describe('#child(): DataSnapshot', () => { - it('should work with multiple calls', () => { - populate({ a: { b: { c: 'd' } } }); - expect(subject.child('a').child('b/c').val()).to.equal('d'); - }); - }); - - describe('#exists(): boolean', () => { - it('should be true for an object value', () => { - populate({ a: { b: 'c' } }); - expect(subject.child('a').exists()).to.be.true; - }); - - it('should be true for a leaf value', () => { - populate({ a: { b: 'c' } }); - expect(subject.child('a/b').exists()).to.be.true; - }); - - it('should be false for a non-existent value', () => { - populate({ a: { b: 'c' } }); - expect(subject.child('d').exists()).to.be.false; - }); - - it('should be false for a value pathed beyond a leaf', () => { - populate({ a: { b: 'c' } }); - expect(subject.child('a/b/c').exists()).to.be.false; - }); - }); - - describe('#forEach(action: (a: DataSnapshot) => boolean): boolean', () => { - it('should iterate through child snapshots', () => { - populate({ a: 'b', c: 'd' }); - let out = ''; - subject.forEach((snap: any) => { - out += snap.val(); - }); - expect(out).to.equal('bd'); - }); - - it('should have correct key values for child snapshots', () => { - populate({ a: 'b', c: 'd' }); - let out = ''; - subject.forEach((snap: any) => { - out += snap.key; - }); - expect(out).to.equal('ac'); - }); - - it('should not execute for leaf or null nodes', () => { - populate(23); - let count = 0; - let counter = (snap: any) => count++; - - expect(subject.forEach(counter)).to.equal(false); - expect(count).to.eq(0); - }); - - it('should cancel further enumeration if callback returns true', () => { - populate({ a: 'b', c: 'd', e: 'f', g: 'h' }); - let out = ''; - const ret = subject.forEach((snap: any) => { - if (snap.val() === 'f') { - return true; - } - out += snap.val(); - }); - expect(out).to.equal('bd'); - expect(ret).to.equal(true); - }); - - it('should not cancel further enumeration if callback returns a 
truthy value', () => { - populate({ a: 'b', c: 'd', e: 'f', g: 'h' }); - let out = ''; - const ret = subject.forEach((snap: any) => { - out += snap.val(); - return 1; - }); - expect(out).to.equal('bdfh'); - expect(ret).to.equal(false); - }); - - it('should not cancel further enumeration if callback does not return', () => { - populate({ a: 'b', c: 'd', e: 'f', g: 'h' }); - let out = ''; - const ret = subject.forEach((snap: any) => { - out += snap.val(); - }); - expect(out).to.equal('bdfh'); - expect(ret).to.equal(false); - }); - }); - - describe('#numChildren()', () => { - it('should be key count for objects', () => { - populate({ a: 'b', c: 'd' }); - expect(subject.numChildren()).to.eq(2); - }); - - it('should be 0 for non-objects', () => { - populate(23); - expect(subject.numChildren()).to.eq(0); - }); - }); - - describe('#hasChild(childPath): boolean', () => { - it('should return true for a child or deep child', () => { - populate({ a: { b: 'c' }, d: 23 }); - expect(subject.hasChild('a/b')).to.be.true; - expect(subject.hasChild('d')).to.be.true; - }); - - it('should return false if a child is missing', () => { - populate({ a: 'b' }); - expect(subject.hasChild('c')).to.be.false; - expect(subject.hasChild('a/b')).to.be.false; - }); - }); - - describe('#key: string', () => { - it('should return the key name', () => { - expect(subject.key).to.equal('foo'); - }); - - it('should return null for the root', () => { - let [instance, path] = database.resourceToInstanceAndPath('projects/_/instances/foo/refs/'); - const snapshot = new database.DataSnapshot( - null, - path, - apps.admin, - instance - ); - expect(snapshot.key).to.be.null; - }); - - it('should work for child paths', () => { - expect(subject.child('foo/bar').key).to.equal('bar'); - }); - }); - - describe('#toJSON(): Object', () => { - it('should return the current value', () => { - populate({ a: 'b' }); - expect(subject.toJSON()).to.deep.equal(subject.val()); - }); - it('should be stringifyable', () => { - populate({ a: 'b' }); - expect(JSON.stringify(subject)).to.deep.equal('{"a":"b"}'); - }); - }); - }); -}); diff --git a/spec/providers/firestore.spec.ts b/spec/providers/firestore.spec.ts deleted file mode 100644 index a8a44c1fa..000000000 --- a/spec/providers/firestore.spec.ts +++ /dev/null @@ -1,423 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as firestore from '../../src/providers/firestore'; -import { expect } from 'chai'; - -describe('Firestore Functions', () => { - let constructValue = (fields: any) => { - return { - 'fields': fields, - 'name': 'projects/pid/databases/(default)/documents/collection/123', - 'createTime': '2017-06-02T18:48:58.920638Z', - 'updateTime': '2017-07-02T18:48:58.920638Z', - }; - }; - - describe('document builders and event types', () => { - function expectedTrigger(resource: string, eventType: string) { - return { - eventTrigger: { - resource, - eventType: `providers/cloud.firestore/eventTypes/${eventType}`, - service: 'firestore.googleapis.com', - }, - }; - } - - before(() => { - process.env.GCLOUD_PROJECT = 'project1'; - }); - - after(() => { - delete process.env.GCLOUD_PROJECT; - }); - - it('should allow terse constructors', () => { - let resource = 'projects/project1/databases/(default)/documents/users/{uid}'; - let cloudFunction = firestore.document('users/{uid}').onWrite(() => null); - expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, 'document.write')); - }); - - it('should allow custom namespaces', () => { - let resource = 'projects/project1/databases/(default)/documents@v2/users/{uid}'; - let cloudFunction = firestore.namespace('v2').document('users/{uid}').onWrite(() => null); - expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, 'document.write')); - }); - - it('should allow custom databases', () => { - let resource = 'projects/project1/databases/myDB/documents/users/{uid}'; - let cloudFunction = firestore.database('myDB').document('users/{uid}').onWrite(() => null); - expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, 'document.write')); - }); - - it('should allow both custom database and namespace', () => { - let resource = 'projects/project1/databases/myDB/documents@v2/users/{uid}'; - let cloudFunction = firestore.database('myDB').namespace('v2').document('users/{uid}').onWrite(() => null); - expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, 'document.write')); - }); - - it('onCreate should have the "document.create" eventType', () => { - let resource = 'projects/project1/databases/(default)/documents/users/{uid}'; - let eventType = firestore.document('users/{uid}').onCreate(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq(expectedTrigger(resource, 'document.create').eventTrigger.eventType); - }); - - it('onUpdate should have the "document.update" eventType', () => { - let resource = 'projects/project1/databases/(default)/documents/users/{uid}'; - let eventType = firestore.document('users/{uid}').onUpdate(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq(expectedTrigger(resource, 'document.update').eventTrigger.eventType); - }); - - it('onDelete should have the "document.delete" eventType', () => { - let resource = 'projects/project1/databases/(default)/documents/users/{uid}'; - let eventType = firestore.document('users/{uid}').onDelete(() => null).__trigger.eventTrigger.eventType; - expect(eventType).to.eq(expectedTrigger(resource, 'document.delete').eventTrigger.eventType); - }); - }); - - describe('process.env.GCLOUD_PROJECT not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => firestore.document('input').onCreate(() => null)).to.not.throw(Error); - }); - - it('should throw when trigger is accessed', () => { - expect(() => firestore.document('input').onCreate(() => null).__trigger).to.throw(Error); - }); 
- - it('should not throw when #run is called', () => { - let cf = firestore.document('input').onCreate(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); - - describe('dataConstructor', () => { - function constructEvent(oldValue: object, value: object, eventType: string) { - return { - 'data': { - 'oldValue': oldValue, - 'value': value, - }, - 'context': {}, - }; - } - - function createOldValue() { - return constructValue({ - 'key1': { - 'booleanValue': false, - }, - 'key2': { - 'integerValue': '111', - }, - }); - } - - function createValue() { - return constructValue({ - 'key1': { - 'booleanValue': true, - }, - 'key2': { - 'integerValue': '123', - }, - }); - } - - it('constructs appropriate fields and getters for event.data on "document.write" events', () => { - let testFunction = firestore.document('path').onWrite((change) => { - expect(change.before.data()).to.deep.equal({key1: false, key2: 111}); - expect(change.before.get('key1')).to.equal(false); - expect(change.after.data()).to.deep.equal({key1: true, key2: 123}); - expect(change.after.get('key1')).to.equal(true); - return true; // otherwise will get warning about returning undefined - }); - let data = constructEvent(createOldValue(), createValue(), 'document.write'); - return testFunction(data); - }).timeout(5000); - - it('constructs appropriate fields and getters for event.data on "document.create" events', () => { - let testFunction = firestore.document('path').onCreate((data) => { - expect(data.data()).to.deep.equal({key1: true, key2: 123}); - expect(data.get('key1')).to.equal(true); - return true; // otherwise will get warning about returning undefined - }); - let data = constructEvent({}, createValue(), 'document.create'); - return testFunction(data); - }).timeout(5000); - - it('constructs appropriate fields and getters for event.data on "document.update" events', () => { - let testFunction = firestore.document('path').onUpdate((change) => { - expect(change.before.data()).to.deep.equal({key1: false, key2: 111}); - expect(change.before.get('key1')).to.equal(false); - expect(change.after.data()).to.deep.equal({key1: true, key2: 123}); - expect(change.after.get('key1')).to.equal(true); - return true; // otherwise will get warning about returning undefined - }); - let data = constructEvent(createOldValue(), createValue(), 'document.update'); - return testFunction(data); - }).timeout(5000); - - it('constructs appropriate fields and getters for event.data on "document.delete" events', () => { - let testFunction = firestore.document('path').onDelete((data) => { - expect(data.data()).to.deep.equal({key1: false, key2: 111}); - expect(data.get('key1')).to.equal(false); - return true; // otherwise will get warning about returning undefined - }); - let data = constructEvent(createOldValue(), {}, 'document.delete'); - return testFunction(data); - }).timeout(5000); - }); - - describe('SnapshotConstructor', () => { - describe('#data()', () => { - it('should parse int values', () => { - let snapshot = firestore.snapshotConstructor({ - data: { - value: constructValue({'key': {'integerValue': '123'}}), - }, - }); - expect(snapshot.data()).to.deep.equal({'key': 123}); - }); - - it('should parse double values', () => { - let snapshot = firestore.snapshotConstructor({ - data: { - value: constructValue({'key': {'doubleValue': 12.34}}), - }, - }); - expect(snapshot.data()).to.deep.equal({'key': 12.34}); - }); - - it('should parse null values', () => { - let snapshot = firestore.snapshotConstructor({ - data: { - value: 
constructValue({'key': {'nullValue': null}}), - }, - }); - expect(snapshot.data()).to.deep.equal({'key': null}); - }); - - it('should parse boolean values', () => { - let snapshot = firestore.snapshotConstructor({ - data: { - value: constructValue({'key': {'booleanValue': true}}), - }, - }); - expect(snapshot.data()).to.deep.equal({'key': true}); - }); - - it('should parse string values', () => { - let snapshot = firestore.snapshotConstructor({ - data: { - value: constructValue({'key': {'stringValue': 'foo'}}), - }, - }); - expect(snapshot.data()).to.deep.equal({'key': 'foo'}); - }); - - it('should parse array values', () => { - let raw = constructValue({ - 'key': { - 'arrayValue': { - 'values': [ - { 'integerValue': '1' }, - { 'integerValue': '2' }, - ], - }, - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()).to.deep.equal({'key': [1, 2]}); - }); - - it('should parse object values', () => { - let raw = constructValue({ - 'keyParent': { - 'mapValue': { - 'fields': { - 'key1': { - 'stringValue': 'val1', - }, - 'key2': { - 'stringValue': 'val2', - }, - }, - }, - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()).to.deep.equal({'keyParent': {'key1':'val1', 'key2':'val2'}}); - }); - - it('should parse GeoPoint values', () => { - let raw = constructValue({ - 'geoPointValue': { - 'mapValue': { - 'fields': { - 'latitude': { - 'doubleValue': 40.73, - }, - 'longitude': { - 'doubleValue': -73.93, - }, - }, - }, - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()).to.deep.equal({'geoPointValue': { - 'latitude': 40.73, - 'longitude': -73.93, - }}); - }); - - it('should parse reference values', () => { - let raw = constructValue({ - 'referenceVal': { - 'referenceValue': 'projects/proj1/databases/(default)/documents/doc1/id', - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()['referenceVal'].path).to.equal('doc1/id'); - }); - - it('should parse timestamp values with precision to the millisecond', () => { - let raw = constructValue({ - 'timestampVal': { - 'timestampValue': '2017-06-13T00:58:40.349Z', - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()).to.deep.equal({'timestampVal': new Date('2017-06-13T00:58:40.349Z')}); - }); - - it('should parse timestamp values with precision to the second', () => { - let raw = constructValue({ - 'timestampVal': { - 'timestampValue': '2017-06-13T00:58:40Z', - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()).to.deep.equal({'timestampVal': new Date('2017-06-13T00:58:40Z')}); - - }); - - it('should parse binary values', () => { - // Format defined in https://developers.google.com/discovery/v1/type-format - let raw = constructValue({ - 'binaryVal': { - 'bytesValue': 'Zm9vYmFy', - }, - }); - let snapshot = firestore.snapshotConstructor({ - data: { value: raw }, - }); - expect(snapshot.data()).to.deep.equal({'binaryVal': new Buffer('foobar')}); - }); - }); - - describe('Other DocumentSnapshot methods', () => { - let snapshot: FirebaseFirestore.DocumentSnapshot; - - before(() => { - snapshot = firestore.snapshotConstructor({ - 'data': { - 'value': { - 'fields': {'key': {'integerValue': '1'}}, - 'createTime': '2017-06-17T14:45:17.876479Z', - 'updateTime': '2017-08-31T18:05:26.928527Z', - 'readTime': 
'2017-07-31T18:23:26.928527Z', - 'name': 'projects/pid/databases/(default)/documents/collection/123', - }, - }, - }); - }); - - it('should support #exists', () => { - expect(snapshot.exists).to.be.true; - }); - - it('should support #ref', () => { - expect(Object.keys(snapshot.ref)).to.deep.equal(['_firestore', '_referencePath']); - expect(snapshot.ref.path).to.equal('collection/123'); - }); - - it('should support #id', () => { - expect(snapshot.id).to.equal('123'); - }); - - it('should support #createTime', () => { - expect(Date.parse(snapshot.createTime)).to.equal(Date.parse('2017-06-17T14:45:17.876479Z')); - }); - - it('should support #updateTime', () => { - expect(Date.parse(snapshot.updateTime)).to.equal(Date.parse('2017-08-31T18:05:26.928527Z')); - }); - - it('should support #readTime', () => { - expect(Date.parse(snapshot.readTime)).to.equal(Date.parse('2017-07-31T18:23:26.928527Z')); - }); - }); - - describe('Handle empty and non-existent documents', () => { - it('constructs non-existent DocumentSnapshot when whole document deleted', () => { - let snapshot = firestore.snapshotConstructor({ - 'data': { - 'value': {}, // value is empty when the whole document is deleted - }, - 'resource': 'projects/pid/databases/(default)/documents/collection/123', - }); - expect(snapshot.exists).to.be.false; - expect(snapshot.ref.path).to.equal('collection/123'); - }); - - it('constructs existent DocumentSnapshot with empty data when all fields of document deleted', () => { - let snapshot = firestore.snapshotConstructor({ - 'data': { - 'value': { // value is not empty when document still exists - 'createTime': '2017-06-02T18:48:58.920638Z', - 'updateTime': '2017-07-02T18:48:58.920638Z', - 'name': 'projects/pid/databases/(default)/documents/collection/123', - }, - }, - }); - expect(snapshot.exists).to.be.true; - expect(snapshot.ref.path).to.equal('collection/123'); - expect(snapshot.data()).to.deep.equal({}); - expect(snapshot.get('key1')).to.equal(undefined); - }); - }); - }); -}); diff --git a/spec/providers/https.spec.ts b/spec/providers/https.spec.ts deleted file mode 100644 index 6e3a89032..000000000 --- a/spec/providers/https.spec.ts +++ /dev/null @@ -1,595 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as express from 'express'; -import * as firebase from 'firebase-admin'; -import * as https from '../../src/providers/https'; -import * as jwt from 'jsonwebtoken'; -import * as mocks from '../fixtures/credential/key.json'; -import * as nock from 'nock'; -import * as _ from 'lodash'; -import { apps as appsNamespace } from '../../src/apps'; -import { expect } from 'chai'; - -describe('CloudHttpsBuilder', () => { - describe('#onRequest', () => { - it('should return a Trigger with appropriate values', () => { - let result = https.onRequest((req, resp) => { - resp.send(200); - }); - expect(result.__trigger).to.deep.equal({httpsTrigger: {}}); - }); - }); -}); - -/** - * RunHandlerResult contains the data from an express.Response. - */ -interface RunHandlerResult { - status: number; - headers: {[name: string]: string}; - body: any; -} - -/** - * A CallTest is a specification for a test of a callable function that - * simulates triggering the http endpoint, and checks that the request - * and response are properly converted to their http equivalents. - */ -interface CallTest { - // An http request, mocking a subset of express.Request. - httpRequest: any; - - // The expected format of the request passed to the handler. - expectedData: any; - - // The function to execute with the request. - callableFunction: (data: any, context: https.CallableContext) => any; - - // The expected shape of the http response returned to the callable SDK. - expectedHttpResponse: RunHandlerResult; -} - -/** - * Runs an express handler with a given request asynchronously and returns the - * data populated into the response. - */ -function runHandler(handler: express.Handler, request: express.Request): Promise { - return new Promise((resolve, reject) => { - // MockResponse mocks an express.Response. - // This class lives here so it can reference resolve and reject. - class MockResponse { - private statusCode = 0; - private headers: {[name: string]: string} = {}; - - public status(code: number) { - this.statusCode = code; - return this; - } - - // Headers are only set by the cors handler. - public setHeader(name: string, value: string) { - this.headers[name] = value; - } - - public getHeader(name: string): string { - return this.headers[name]; - } - - public send(body: any) { - resolve({ - status: this.statusCode, - headers: this.headers, - body, - }); - } - - public end() { - this.send(undefined); - } - } - - const response = new MockResponse(); - handler(request, response as any, () => undefined); - }); -} - -// Runs a CallTest test. -async function runTest(test: CallTest): Promise { - const callableFunction = https.onCall((data, context) => { - expect(data).to.deep.equal(test.expectedData); - return test.callableFunction(data, context); - }); - - const response = await runHandler(callableFunction, test.httpRequest); - - expect(response.body).to.deep.equal(test.expectedHttpResponse.body); - expect(response.headers).to.deep.equal(test.expectedHttpResponse.headers); - expect(response.status).to.equal(test.expectedHttpResponse.status); -} - -// MockRequest mocks an express.Request. -class MockRequest { - public method: 'POST'|'GET'|'OPTIONS' = 'POST'; - - constructor(readonly body: any, readonly headers: {[name: string]: string}) { - // This block intentionally left blank. - } - - public header(name: string): string { - return this.headers[name.toLowerCase()]; - } -} - -// Creates a mock request with the given data and content-type. 
-function request( - data: any, - contentType: string = 'application/json', - context: { - authorization?: string; - instanceIdToken?: string; - } = {}) { - const body: any = {}; - if (!_.isUndefined(data)) { - body.data = data; - } - - const headers = { - 'content-type': contentType, - 'authorization': context.authorization, - 'firebase-instance-id-token': context.instanceIdToken, - 'origin': 'example.com', - }; - - return new MockRequest(body, headers); -} - -const expectedResponseHeaders = { - 'Access-Control-Allow-Origin': 'example.com', - Vary: 'Origin', -}; - -/** - * Mocks out the http request used by the firebase-admin SDK to get the key for - * verifying an id token. - */ -function mockFetchPublicKeys(): nock.Scope { - let mock: nock.Scope = nock('https://www.googleapis.com:443') - .get('/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com'); - const mockedResponse = {[mocks.key_id]: mocks.public_key}; - const headers = { - 'cache-control': 'public, max-age=1, must-revalidate, no-transform', - }; - return mock.reply(200, mockedResponse, headers); -} - -/** - * Generates a mocked Firebase ID token. - */ -export function generateIdToken(projectId: string): string { - const claims = {}; - const options = { - audience: projectId, - expiresIn: 60 * 60, // 1 hour in seconds - issuer: 'https://securetoken.google.com/' + projectId, - subject: mocks.user_id, - algorithm: 'RS256', - header: { - kid: mocks.key_id, - }, - }; - return jwt.sign(claims, mocks.private_key, options); -} - -describe('callable.FunctionBuilder', () => { - let app: firebase.app.App; - - before(() => { - let credential = { - getAccessToken: () => { - return Promise.resolve({ - expires_in: 1000, - access_token: 'fake', - }); - }, - getCertificate: () => { - return { - projectId: 'aProjectId', - }; - }, - }; - app = firebase.initializeApp({ - projectId: 'aProjectId', - credential: credential, - }); - Object.defineProperty(appsNamespace(), 'admin', { get: () => app }); - }); - - after(() => { - app.delete(); - delete appsNamespace.singleton; - }); - - describe('#onCall', () => { - it('should return a Trigger with appropriate values', () => { - const result = https.onCall((data) => { - return 'response'; - }); - expect(result.__trigger).to.deep.equal({ - httpsTrigger: {}, - labels: { 'deployment-callable': 'true' }, - }); - }); - - it('should handle success', () => { - return runTest({ - httpRequest: request({foo: 'bar'}), - expectedData: {foo: 'bar'}, - callableFunction: (data, context) => ({baz: 'qux'}), - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: {baz: 'qux'}}, - }, - }); - }); - - it('should handle null data and return', () => { - return runTest({ - httpRequest: request(null), - expectedData: null, - callableFunction: (data, context) => null, - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: null}, - }, - }); - }); - - it('should handle void return', () => { - return runTest({ - httpRequest: request(null), - expectedData: null, - callableFunction: (data, context) => { return; }, - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: null}, - }, - }); - }); - - it('should reject bad method', () => { - let req = request(null); - req.method = 'GET'; - return runTest({ - httpRequest: req, - expectedData: null, - callableFunction: (data, context) => { return; }, - expectedHttpResponse: { - status: 400, - headers: expectedResponseHeaders, - body: {error: {status: 'INVALID_ARGUMENT', 
message: 'Bad Request'}}, - }, - }); - }); - - it('should ignore charset', () => { - return runTest({ - httpRequest: request(null, 'application/json; charset=utf-8'), - expectedData: null, - callableFunction: (data, context) => { return; }, - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: null}, - }, - }); - }); - - it('should reject bad content type', () => { - return runTest({ - httpRequest: request(null, 'text/plain'), - expectedData: null, - callableFunction: (data, context) => { return; }, - expectedHttpResponse: { - status: 400, - headers: expectedResponseHeaders, - body: {error: {status: 'INVALID_ARGUMENT', message: 'Bad Request'}}, - }, - }); - }); - - it('should reject extra body fields', () => { - const req = request(null); - req.body.extra = 'bad'; - return runTest({ - httpRequest: req, - expectedData: null, - callableFunction: (data, context) => { return; }, - expectedHttpResponse: { - status: 400, - headers: expectedResponseHeaders, - body: {error: {status: 'INVALID_ARGUMENT', message: 'Bad Request'}}, - }, - }); - }); - - it('should handle unhandled error', () => { - return runTest({ - httpRequest: request(null), - expectedData: null, - callableFunction: (data, context) => { - throw 'ceci n\'est pas une error'; - }, - expectedHttpResponse: { - status: 500, - headers: expectedResponseHeaders, - body: {error: {status: 'INTERNAL', message: 'INTERNAL'}}, - }, - }); - }); - - it('should handle unknown error status', () => { - return runTest({ - httpRequest: request(null), - expectedData: null, - callableFunction: (data, context) => { - throw new https.HttpsError('THIS_IS_NOT_VALID' as any, 'nope'); - }, - expectedHttpResponse: { - status: 500, - headers: expectedResponseHeaders, - body: {error: {status: 'INTERNAL', message: 'INTERNAL'}}, - }, - }); - }); - - it('should handle well-formed error', () => { - return runTest({ - httpRequest: request(null), - expectedData: null, - callableFunction: (data, context) => { - throw new https.HttpsError('not-found', 'i am error'); - }, - expectedHttpResponse: { - status: 404, - headers: expectedResponseHeaders, - body: {error: {status: 'NOT_FOUND', message: 'i am error'}}, - }, - }); - }); - - it('should handle auth', async () => { - const mock = mockFetchPublicKeys(); - const projectId = appsNamespace().admin.options.projectId; - const idToken = generateIdToken(projectId); - await runTest({ - httpRequest: request(null, 'application/json', { - authorization: 'Bearer ' + idToken, - }), - expectedData: null, - callableFunction: (data, context) => { - expect(context.auth).to.not.be.undefined; - expect(context.auth).to.not.be.null; - expect(context.auth.uid).to.equal(mocks.user_id); - expect(context.auth.token.uid).to.equal(mocks.user_id); - expect(context.auth.token.sub).to.equal(mocks.user_id); - expect(context.auth.token.aud).to.equal(projectId); - expect(context.instanceIdToken).to.be.undefined; - return null; - }, - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: null}, - }, - }); - mock.done(); - }); - - it('should reject bad auth', async () => { - await runTest({ - httpRequest: request(null, 'application/json', { - authorization: 'Bearer FAKE', - }), - expectedData: null, - callableFunction: (data, context) => { return; }, - expectedHttpResponse: { - status: 401, - headers: expectedResponseHeaders, - body: { - error: { - status: 'UNAUTHENTICATED', - message: 'Unauthenticated', - }, - }, - }, - }); - }); - - it('should handle instance id', async () => { 
- await runTest({ - httpRequest: request(null, 'application/json', { - instanceIdToken: 'iid-token', - }), - expectedData: null, - callableFunction: (data, context) => { - expect(context.auth).to.be.undefined; - expect(context.instanceIdToken).to.equal('iid-token'); - return null; - }, - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: null}, - }, - }); - }); - - it('should expose raw request', async () => { - const mockRequest = request(null, 'application/json', {}); - await runTest({ - httpRequest: mockRequest, - expectedData: null, - callableFunction: (data, context) => { - expect(context.rawRequest).to.not.be.undefined; - expect(context.rawRequest).to.equal(mockRequest); - return null; - }, - expectedHttpResponse: { - status: 200, - headers: expectedResponseHeaders, - body: {result: null}, - }, - }); - }); - }); -}); - -describe('callable CORS', () => { - it('handles OPTIONS preflight', async () => { - const func = https.onCall((data, context) => { - throw "This shouldn't have gotten called for an OPTIONS preflight."; - }); - - const request = new MockRequest({}, { - 'Access-Control-Request-Method': 'POST', - 'Access-Control-Request-Headers': 'origin', - Origin: 'example.com', - }); - request.method = 'OPTIONS'; - - const response = await runHandler(func, request as any); - - expect(response.status).to.equal(204); - expect(response.body).to.be.undefined; - expect(response.headers).to.deep.equal({ - 'Access-Control-Allow-Methods': 'POST', - 'Content-Length': '0', - Vary: 'Origin, Access-Control-Request-Headers', - }); - }); -}); - -describe('callable', () => { - it('encodes null', () => { - expect(https.encode(null)).to.be.null; - expect(https.encode(undefined)).to.be.null; - }); - - it('encodes int', () => { - expect(https.encode(1)).to.equal(1); - // Number isn't allowed in our own codebase, but we need to test it, in case - // a user passes one in. There's no reason not to support it, and we don't - // want to unintentionally encode them as {}. - // tslint:disable-next-line - expect(https.encode(new Number(1))).to.equal(1); - }); - - it('decodes int', () => { - expect(https.decode(1)).to.equal(1); - }); - - it('encodes long', () => { - expect(https.encode(-9223372036854775000)).to.equal( - -9223372036854775000); - }); - - it('decodes long', () => { - expect(https.decode({ - '@type': 'type.googleapis.com/google.protobuf.Int64Value', - 'value': '-9223372036854775000', - })).to.equal(-9223372036854775000); - }); - - it('encodes unsigned long', () => { - expect(https.encode(9223372036854800000)).to.equal(9223372036854800000); - }); - - it('decodes unsigned long', () => { - expect(https.decode({ - '@type': 'type.googleapis.com/google.protobuf.UInt64Value', - 'value': '9223372036854800000', - })).to.equal(9223372036854800000); - }); - - it('encodes double', () => { - expect(https.encode(1.2)).to.equal(1.2); - }); - - it('decodes double', () => { - expect(https.decode(1.2)).to.equal(1.2); - }); - - it('encodes string', () => { - expect(https.encode('hello')).to.equal('hello'); - }); - - it('decodes string', () => { - expect(https.decode('hello')).to.equal('hello'); - }); - - it('encodes array', () => { - // TODO(klimt): Make this test more interesting once there's some type - // that needs encoding that can be created from JavaScript. 
- expect(https.encode([1, '2', [3, 4]])).to.deep.equal([1, '2', [3, 4]]); - }); - - it('decodes array', () => { - expect(https.decode( - [1, '2', [3, { - value: '1099511627776', - '@type': 'type.googleapis.com/google.protobuf.Int64Value', - }]])).to.deep.equal([1, '2', [3, 1099511627776]]); - }); - - it('encodes object', () => { - // TODO(klimt): Make this test more interesting once there's some type - // that needs encoding that can be created from JavaScript. - expect(https.encode({ - foo: 1, - bar: 'hello', - baz: [1, 2, 3], - })).to.deep.equal({ - foo: 1, - bar: 'hello', - baz: [1, 2, 3], - }); - }); - - it('decodes object', () => { - expect(https.decode({ - foo: 1, - bar: 'hello', - baz: [1, 2, { - value: '1099511627776', - '@type': 'type.googleapis.com/google.protobuf.Int64Value', - }], - })).to.deep.equal({ - foo: 1, - bar: 'hello', - baz: [1, 2, 1099511627776], - }); - }); -}); diff --git a/spec/providers/pubsub.spec.ts b/spec/providers/pubsub.spec.ts deleted file mode 100644 index e4330bff8..000000000 --- a/spec/providers/pubsub.spec.ts +++ /dev/null @@ -1,131 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
- -import * as pubsub from '../../src/providers/pubsub'; -import { expect } from 'chai'; - -describe('Pubsub Functions', () => { - describe('pubsub.Message', () => { - describe('#json', () => { - it('should return json decoded from base64', () => { - let message = new pubsub.Message({ - data: new Buffer('{"hello":"world"}', 'utf8').toString('base64'), - }); - - expect(message.json.hello).to.equal('world'); - }); - - it('should preserve passed in json', () => { - let message = new pubsub.Message({ - data: new Buffer('{"hello":"world"}', 'utf8').toString('base64'), - json: {goodbye: 'world'}, - }); - - expect(message.json.goodbye).to.equal('world'); - }); - }); - - describe('#toJSON', () => { - it('should be JSON stringify-able', () => { - let encoded = new Buffer('{"hello":"world"}', 'utf8').toString('base64'); - let message = new pubsub.Message({ - data: encoded, - }); - - expect(JSON.parse(JSON.stringify(message))).to.deep.equal({ - data: encoded, - attributes: {}, - }); - }); - }); - }); - - describe('pubsub.FunctionBuilder', () => { - - before(() => { - process.env.GCLOUD_PROJECT = 'project1'; - }); - - after(() => { - delete process.env.GCLOUD_PROJECT; - }); - - describe('#onPublish', () => { - it('should return a TriggerDefinition with appropriate values', () => { - // Pick up project from process.env.GCLOUD_PROJECT - const result = pubsub.topic('toppy').onPublish(() => null); - expect(result.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.pubsub.topic.publish', - resource: 'projects/project1/topics/toppy', - service: 'pubsub.googleapis.com', - }, - }); - }); - - it ('should throw with improperly formatted topics', () => { - expect(() => pubsub.topic('bad/topic/format')).to.throw(Error); - }); - - it('should properly handle a new-style event', () => { - const raw = new Buffer('{"hello":"world"}', 'utf8').toString('base64'); - const event = { - data: { - data: raw, - attributes: { - foo: 'bar', - }, - }, - }; - - const result = pubsub.topic('toppy').onPublish(data => { - return { - raw: data.data, - json: data.json, - attributes: data.attributes, - }; - }); - - return expect(result(event)).to.eventually.deep.equal({ - raw, - json: {hello: 'world'}, - attributes: {foo: 'bar'}, - }); - }); - }); - }); - - describe('process.env.GCLOUD_PROJECT not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => pubsub.topic('toppy').onPublish(() => null)).to.not.throw(Error); - }); - - it('should throw when trigger is accessed', () => { - expect(() => pubsub.topic('toppy').onPublish(() => null).__trigger).to.throw(Error); - }); - - it('should not throw when #run is called', () => { - let cf = pubsub.topic('toppy').onPublish(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); -}); diff --git a/spec/providers/storage.spec.ts b/spec/providers/storage.spec.ts deleted file mode 100644 index c6946082f..000000000 --- a/spec/providers/storage.spec.ts +++ /dev/null @@ -1,277 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this 
permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import * as storage from '../../src/providers/storage'; -import { expect as expect } from 'chai'; - -describe('Storage Functions', () => { - describe('ObjectBuilder', () => { - before(() => { - process.env.FIREBASE_CONFIG = JSON.stringify({ - storageBucket: 'bucket', - }); - }); - - after(() => { - delete process.env.FIREBASE_CONFIG; - }); - - describe('#onArchive', () => { - it('should return a TriggerDefinition with appropriate values', () => { - let cloudFunction = storage.bucket('bucky').object().onArchive(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.archive', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should use the default bucket when none is provided', () => { - let cloudFunction = storage.object().onArchive(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.archive', - resource: 'projects/_/buckets/bucket', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should allow fully qualified bucket names', () => { - let subjectQualified = new storage.ObjectBuilder(() => 'projects/_/buckets/bucky'); - let result = subjectQualified.onArchive(() => null); - expect(result.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.archive', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should throw with improperly formatted buckets', () => { - expect(() => storage.bucket('bad/bucket/format').object().onArchive(() => null).__trigger) - .to.throw(Error); - }); - - it('should not mess with media links using non-literal slashes', () => { - let cloudFunction = storage.object().onArchive(data => { - return data.mediaLink; - }); - let goodMediaLinkEvent = { - data: { - mediaLink: 'https://www.googleapis.com/storage/v1/b/mybucket.appspot.com' - + '/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media', - }, - }; - return cloudFunction(goodMediaLinkEvent).then((result: any) => { - expect(result).equals(goodMediaLinkEvent.data.mediaLink); - }); - }); - }); - - describe('#onDelete', () => { - it('should return a TriggerDefinition with appropriate values', () => { - let cloudFunction = storage.bucket('bucky').object().onDelete(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.delete', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should use the default bucket when none is provided', () => { - let cloudFunction = storage.object().onDelete(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.delete', - resource: 'projects/_/buckets/bucket', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should allow fully qualified bucket 
names', () => { - let subjectQualified = new storage.ObjectBuilder(() => 'projects/_/buckets/bucky'); - let result = subjectQualified.onDelete(() => null); - expect(result.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.delete', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should throw with improperly formatted buckets', () => { - expect(() => storage.bucket('bad/bucket/format').object().onDelete(() => null).__trigger) - .to.throw(Error); - }); - - it('should not mess with media links using non-literal slashes', () => { - let cloudFunction = storage.object().onDelete(data => { - return data.mediaLink; - }); - let goodMediaLinkEvent = { - data: { - mediaLink: 'https://www.googleapis.com/storage/v1/b/mybucket.appspot.com' - + '/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media', - }, - }; - return cloudFunction(goodMediaLinkEvent).then((result: any) => { - expect(result).equals(goodMediaLinkEvent.data.mediaLink); - }); - }); - }); - - describe('#onFinalize', () => { - it('should return a TriggerDefinition with appropriate values', () => { - let cloudFunction = storage.bucket('bucky').object().onFinalize(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.finalize', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should use the default bucket when none is provided', () => { - let cloudFunction = storage.object().onFinalize(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.finalize', - resource: 'projects/_/buckets/bucket', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should allow fully qualified bucket names', () => { - let subjectQualified = new storage.ObjectBuilder(() => 'projects/_/buckets/bucky'); - let result = subjectQualified.onFinalize(() => null); - expect(result.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.finalize', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should throw with improperly formatted buckets', () => { - expect(() => storage.bucket('bad/bucket/format').object().onFinalize(() => null).__trigger) - .to.throw(Error); - }); - - it('should not mess with media links using non-literal slashes', () => { - let cloudFunction = storage.object().onFinalize(data => { - return data.mediaLink; - }); - let goodMediaLinkEvent = { - data: { - mediaLink: 'https://www.googleapis.com/storage/v1/b/mybucket.appspot.com' - + '/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media', - }, - }; - return cloudFunction(goodMediaLinkEvent).then((result: any) => { - expect(result).equals(goodMediaLinkEvent.data.mediaLink); - }); - }); - }); - - describe('#onMetadataUpdate', () => { - it('should return a TriggerDefinition with appropriate values', () => { - let cloudFunction = storage.bucket('bucky').object().onMetadataUpdate(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.metadataUpdate', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should use the default bucket when none is provided', () => { - let cloudFunction = storage.object().onMetadataUpdate(() => null); - expect(cloudFunction.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 
'google.storage.object.metadataUpdate', - resource: 'projects/_/buckets/bucket', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should allow fully qualified bucket names', () => { - let subjectQualified = new storage.ObjectBuilder(() => 'projects/_/buckets/bucky'); - let result = subjectQualified.onMetadataUpdate(() => null); - expect(result.__trigger).to.deep.equal({ - eventTrigger: { - eventType: 'google.storage.object.metadataUpdate', - resource: 'projects/_/buckets/bucky', - service: 'storage.googleapis.com', - }, - }); - }); - - it('should throw with improperly formatted buckets', () => { - expect(() => storage.bucket('bad/bucket/format').object().onMetadataUpdate(() => null).__trigger) - .to.throw(Error); - }); - - it('should not mess with media links using non-literal slashes', () => { - let cloudFunction = storage.object().onMetadataUpdate(data => { - return data.mediaLink; - }); - let goodMediaLinkEvent = { - data: { - mediaLink: 'https://www.googleapis.com/storage/v1/b/mybucket.appspot.com' - + '/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media', - }, - }; - return cloudFunction(goodMediaLinkEvent).then((result: any) => { - expect(result).equals(goodMediaLinkEvent.data.mediaLink); - }); - }); - }); - }); - - describe('process.env.FIREBASE_CONFIG not set', () => { - it('should not throw if __trigger is not accessed', () => { - expect(() => storage.object().onArchive(() => null)).to.not.throw(Error); - }); - - it('should throw when trigger is accessed', () => { - expect(() => storage.object().onArchive(() => null).__trigger).to.throw(Error); - }); - - it('should not throw when #run is called', () => { - let cf = storage.object().onArchive(() => null); - expect(cf.run).to.not.throw(Error); - }); - }); -}); diff --git a/spec/runtime/loader.spec.ts b/spec/runtime/loader.spec.ts new file mode 100644 index 000000000..e67140c1b --- /dev/null +++ b/spec/runtime/loader.spec.ts @@ -0,0 +1,479 @@ +import { expect } from "chai"; +import * as path from "path"; + +import * as functions from "../../src/v1"; +import * as loader from "../../src/runtime/loader"; +import { + ManifestEndpoint, + ManifestExtension, + ManifestRequiredAPI, + ManifestStack, +} from "../../src/runtime/manifest"; +import { clearParams } from "../../src/params"; +import { MINIMAL_V1_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../fixtures"; +import { MINIMAL_SCHEDULE_TRIGGER, MINIMIAL_TASK_QUEUE_TRIGGER } from "../v1/providers/fixtures"; +import { BooleanParam, IntParam, StringParam } from "../../src/params/types"; + +describe("extractStack", () => { + const httpFn = functions.https.onRequest(() => undefined); + const httpEndpoint = { + platform: "gcfv1", + httpsTrigger: {}, + }; + + const callableFn = functions.https.onCall(() => undefined); + const callableEndpoint = { + platform: "gcfv1", + labels: {}, // TODO: empty labels? 
+ callableTrigger: {}, + }; + + it("extracts stack from a simple module", () => { + const module = { + http: httpFn, + callable: callableFn, + }; + + const endpoints: Record = {}; + const requiredAPIs: ManifestRequiredAPI[] = []; + const extensions: Record = {}; + + loader.extractStack(module, endpoints, requiredAPIs, extensions); + + expect(endpoints).to.be.deep.equal({ + http: { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "http", + ...httpEndpoint, + }, + callable: { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "callable", + ...callableEndpoint, + }, + }); + + expect(requiredAPIs).to.be.empty; + }); + + it("extracts stack with required APIs", () => { + const module = { + taskq: functions.tasks.taskQueue().onDispatch(() => undefined), + }; + + const endpoints: Record = {}; + const requiredAPIs: ManifestRequiredAPI[] = []; + const extensions: Record = {}; + + loader.extractStack(module, endpoints, requiredAPIs, extensions); + + expect(endpoints).to.be.deep.equal({ + taskq: { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "taskq", + platform: "gcfv1", + taskQueueTrigger: MINIMIAL_TASK_QUEUE_TRIGGER, + }, + }); + + expect(requiredAPIs).to.be.deep.equal([ + { + api: "cloudtasks.googleapis.com", + reason: "Needed for task queue functions", + }, + ]); + }); + + it("extracts stack from a module with group functions", () => { + const module = { + fn1: httpFn, + g1: { + fn2: httpFn, + }, + }; + + const endpoints: Record = {}; + const requiredAPIs: ManifestRequiredAPI[] = []; + const extensions: Record = {}; + + loader.extractStack(module, endpoints, requiredAPIs, extensions); + + expect(endpoints).to.be.deep.equal({ + fn1: { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "fn1", + ...httpEndpoint, + }, + "g1-fn2": { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "g1.fn2", + ...httpEndpoint, + }, + }); + }); + + describe("with GCLOUD_PROJECT env var", () => { + const project = "my-project"; + let prev; + + beforeEach(() => { + prev = process.env.GCLOUD_PROJECT; + process.env.GCLOUD_PROJECT = project; + }); + + afterEach(() => { + process.env.GCLOUD_PROJECT = prev; + clearParams(); + }); + + it("extracts stack from a simple module", () => { + const module = { + fn: functions.pubsub.topic("my-topic").onPublish(() => undefined), + }; + + const endpoints: Record = {}; + const requiredAPIs: ManifestRequiredAPI[] = []; + const extensions: Record = {}; + + loader.extractStack(module, endpoints, requiredAPIs, extensions); + + expect(endpoints).to.be.deep.equal({ + fn: { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "fn", + platform: "gcfv1", + eventTrigger: { + eventType: "google.pubsub.topic.publish", + eventFilters: { + resource: "projects/my-project/topics/my-topic", + }, + retry: false, + }, + labels: {}, + }, + }); + }); + + it("extracts stack with required APIs", () => { + const module = { + scheduled: functions.pubsub.schedule("every 5 minutes").onRun(() => undefined), + }; + + const endpoints: Record = {}; + const requiredAPIs: ManifestRequiredAPI[] = []; + const extensions: Record = {}; + + loader.extractStack(module, endpoints, requiredAPIs, extensions); + + expect(endpoints).to.be.deep.equal({ + scheduled: { + ...MINIMAL_V1_ENDPOINT, + entryPoint: "scheduled", + platform: "gcfv1", + // TODO: This label should not exist? 
+ labels: {}, + scheduleTrigger: { ...MINIMAL_SCHEDULE_TRIGGER, schedule: "every 5 minutes" }, + }, + }); + + expect(requiredAPIs).to.be.deep.equal([ + { + api: "cloudscheduler.googleapis.com", + reason: "Needed for scheduled functions.", + }, + ]); + }); + }); +}); + +describe("mergedRequiredAPIs", () => { + it("leaves required APIs unchanged if nothing to merge", () => { + expect( + loader.mergeRequiredAPIs([ + { api: "example1.com", reason: "example1" }, + { api: "example2.com", reason: "example2" }, + ]) + ).to.be.deep.equal([ + { api: "example1.com", reason: "example1" }, + { api: "example2.com", reason: "example2" }, + ]); + }); + + it("merges reasons given overlapping required api", () => { + expect( + loader.mergeRequiredAPIs([ + { api: "example1.com", reason: "example1a" }, + { api: "example1.com", reason: "example1b" }, + { api: "example2.com", reason: "example2" }, + ]) + ).to.be.deep.equal([ + { api: "example1.com", reason: "example1a example1b" }, + { api: "example2.com", reason: "example2" }, + ]); + }); + + it("merges reasons given overlapping required api", () => { + expect( + loader.mergeRequiredAPIs([ + { api: "example1.com", reason: "example1a" }, + { api: "example1.com", reason: "example1b" }, + { api: "example2.com", reason: "example2" }, + ]) + ).to.be.deep.equal([ + { api: "example1.com", reason: "example1a example1b" }, + { api: "example2.com", reason: "example2" }, + ]); + }); + + it("does not repeat the same reason", () => { + expect( + loader.mergeRequiredAPIs([ + { api: "example1.com", reason: "example1a" }, + { api: "example1.com", reason: "example1a" }, + { api: "example2.com", reason: "example2" }, + ]) + ).to.be.deep.equal([ + { api: "example1.com", reason: "example1a" }, + { api: "example2.com", reason: "example2" }, + ]); + }); +}); + +describe("loadStack", () => { + const expected: ManifestStack = { + endpoints: { + v1http: { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + entryPoint: "v1http", + httpsTrigger: {}, + }, + v1callable: { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + entryPoint: "v1callable", + labels: {}, + callableTrigger: {}, + }, + v2http: { + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + }, + v2callable: { + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + entryPoint: "v2callable", + labels: {}, + callableTrigger: {}, + }, + ttOnStart: { + ...MINIMAL_V2_ENDPOINT, + entryPoint: "ttOnStart", + eventTrigger: { + channel: "projects/locations/us-central1/channels/firebase", + eventFilters: {}, + eventType: "firebase.extensions.firestore-translate-text.v1.onStart", + retry: false, + }, + labels: {}, + platform: "gcfv2", + region: ["us-central1"], + }, + }, + requiredAPIs: [ + { + api: "eventarcpublishing.googleapis.com", + reason: "Needed for custom event functions", + }, + ], + extensions: { + extRef1: { + params: { + COLLECTION_PATH: "collection1", + INPUT_FIELD_NAME: "input1", + LANGUAGES: "de,es", + OUTPUT_FIELD_NAME: "translated", + "firebaseextensions.v1beta.function/location": "us-central1", + _EVENT_ARC_REGION: "us-central1", + }, + ref: "firebase/firestore-translate-text@0.1.18", + events: ["firebase.extensions.firestore-translate-text.v1.onStart"], + }, + extLocal2: { + params: { + DO_BACKFILL: "False", + LOCATION: "us-central1", + }, + localPath: "./functions/generated/extensions/local/backfill/0.0.2/src", + events: [], + }, + }, + specVersion: "v1alpha1", + }; + + interface Testcase { + name: string; + modulePath: string; + expected: ManifestStack; + } + function 
runTests(tc: Testcase) { + it("loads stack given relative path", async () => { + await expect(loader.loadStack(tc.modulePath)).to.eventually.deep.equal(tc.expected); + }); + + it("loads stack given absolute path", async () => { + await expect( + loader.loadStack(path.join(process.cwd(), tc.modulePath)) + ).to.eventually.deep.equal(tc.expected); + }); + } + + let prev; + + beforeEach(() => { + // TODO: When __trigger annotation is removed and GCLOUD_PROJECT is not required at runtime, remove this. + prev = process.env.GCLOUD_PROJECT; + process.env.GCLOUD_PROJECT = "test-project"; + }); + + afterEach(() => { + process.env.GCLOUD_PROJECT = prev; + }); + + describe("commonjs", () => { + const testcases: Testcase[] = [ + { + name: "basic", + modulePath: "./spec/fixtures/sources/commonjs", + expected, + }, + { + name: "has main", + modulePath: "./spec/fixtures/sources/commonjs-main", + expected, + }, + { + name: "grouped", + modulePath: "./spec/fixtures/sources/commonjs-grouped", + expected: { + ...expected, + endpoints: { + ...expected.endpoints, + "g1-groupedhttp": { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + entryPoint: "g1.groupedhttp", + httpsTrigger: {}, + }, + "g1-groupedcallable": { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + entryPoint: "g1.groupedcallable", + labels: {}, + callableTrigger: {}, + }, + }, + }, + }, + { + name: "has params", + modulePath: "./spec/fixtures/sources/commonjs-params", + expected: { + ...expected, + params: [ + { name: "BORING", type: "string" }, + { + name: "FOO", + type: "string", + input: { text: { validationRegex: "w+" } }, + }, + { + name: "BAR", + type: "string", + default: "{{ params.FOO }}", + label: "asdf", + }, + { + name: "BAZ", + type: "string", + input: { + select: { options: [{ value: "a" }, { value: "b" }] }, + }, + }, + { name: "AN_INT", type: "int", default: `{{ params.BAR == "qux" ? 
0 : 1 }}` }, + { + name: "ANOTHER_INT", + type: "int", + input: { + select: { + options: [ + { label: "a", value: -2 }, + { label: "b", value: 2 }, + ], + }, + }, + }, + { + name: "LIST_PARAM", + type: "list", + input: { + multiSelect: { options: [{ value: "c" }, { value: "d" }, { value: "e" }] }, + }, + }, + { name: "SUPER_SECRET_FLAG", type: "secret" }, + ], + }, + }, + { + name: "can use parameterized fields", + modulePath: "./spec/fixtures/sources/commonjs-parametrized-fields", + expected: { + ...expected, + params: [ + { name: "STRING_PARAM", type: "string" }, + { name: "INT_PARAM", type: "int" }, + { name: "BOOLEAN_PARAM", type: "boolean" }, + ], + requiredAPIs: [], + extensions: {}, + endpoints: { + v1http: { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + entryPoint: "v1http", + minInstances: new IntParam("INT_PARAM"), + maxInstances: new IntParam("INT_PARAM"), + availableMemoryMb: new IntParam("INT_PARAM"), + timeoutSeconds: new IntParam("INT_PARAM"), + serviceAccountEmail: new StringParam("STRING_PARAM"), + omit: new BooleanParam("BOOLEAN_PARAM"), + httpsTrigger: {}, + }, + v2http: { + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + entryPoint: "v2http", + minInstances: new IntParam("INT_PARAM"), + maxInstances: new IntParam("INT_PARAM"), + availableMemoryMb: new IntParam("INT_PARAM"), + timeoutSeconds: new IntParam("INT_PARAM"), + serviceAccountEmail: new StringParam("STRING_PARAM"), + omit: new BooleanParam("BOOLEAN_PARAM"), + labels: {}, + httpsTrigger: {}, + }, + }, + }, + }, + ]; + + for (const tc of testcases) { + describe(tc.name, () => { + runTests(tc); + }); + } + }); +}); diff --git a/spec/runtime/manifest.spec.ts b/spec/runtime/manifest.spec.ts new file mode 100644 index 000000000..7534ba2ee --- /dev/null +++ b/spec/runtime/manifest.spec.ts @@ -0,0 +1,309 @@ +import { expect } from "chai"; +import { + stackToWire, + ManifestStack, + initV2ScheduleTrigger, + initV1ScheduleTrigger, + initTaskQueueTrigger, +} from "../../src/runtime/manifest"; +import { RESET_VALUE } from "../../src/common/options"; +import * as params from "../../src/params"; +import * as optsv2 from "../../src/v2/options"; +import * as v1 from "../../src/v1"; +import { DeploymentOptions } from "../../src/v1"; + +describe("stackToWire", () => { + afterEach(() => { + params.clearParams(); + }); + + it("converts regular expressions used in param inputs", () => { + const regExpParam = params.defineString("foo", { + input: { text: { validationRegex: /\d{5}/ } }, + }); + + const stack: ManifestStack = { + endpoints: {}, + requiredAPIs: [], + params: [regExpParam.toSpec()], + specVersion: "v1alpha1", + extensions: {}, + }; + const expected = { + endpoints: {}, + requiredAPIs: [], + params: [ + { + name: "foo", + type: "string", + input: { + text: { + validationRegex: "\\d{5}", + }, + }, + }, + ], + specVersion: "v1alpha1", + extensions: {}, + }; + expect(stackToWire(stack)).to.deep.equal(expected); + }); + + it("converts stack with null values", () => { + const stack: ManifestStack = { + endpoints: { + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + maxInstances: null, + }, + }, + requiredAPIs: [], + specVersion: "v1alpha1", + extensions: { + ext1: { + params: {}, + localPath: "localPath", + events: [], + }, + }, + }; + const expected = { + endpoints: { + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + maxInstances: null, + }, + }, + requiredAPIs: [], + specVersion: "v1alpha1", + extensions: { + ext1: { + params: {}, + 
localPath: "localPath", + events: [], + }, + }, + }; + expect(stackToWire(stack)).to.deep.equal(expected); + }); + + it("converts stack with RESET_VALUES", () => { + const stack: ManifestStack = { + endpoints: { + v1http: { + platform: "gcfv1", + entryPoint: "v1http", + labels: {}, + httpsTrigger: {}, + maxInstances: v1.RESET_VALUE, + }, + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + maxInstances: optsv2.RESET_VALUE, + }, + }, + requiredAPIs: [], + specVersion: "v1alpha1", + extensions: { + ext1: { + params: {}, + localPath: "localPath", + events: [], + }, + }, + }; + const expected = { + endpoints: { + v1http: { + platform: "gcfv1", + entryPoint: "v1http", + labels: {}, + httpsTrigger: {}, + maxInstances: null, + }, + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + maxInstances: null, + }, + }, + requiredAPIs: [], + specVersion: "v1alpha1", + extensions: { + ext1: { + params: {}, + localPath: "localPath", + events: [], + }, + }, + }; + expect(stackToWire(stack)).to.deep.equal(expected); + }); + + it("converts Expression types in endpoint options to CEL", () => { + const intParam = params.defineInt("foo", { default: 11 }); + const stringParam = params.defineString("bar", { + default: "America/Los_Angeles", + }); + + const stack: ManifestStack = { + endpoints: { + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + concurrency: intParam, + maxInstances: intParam.equals(24).thenElse(-1, 1), + }, + v2schedule: { + platform: "gcfv2", + entryPoint: "v2callable", + labels: {}, + scheduleTrigger: { + schedule: stringParam.equals("America/Mexico_City").thenElse("mexico", "usa"), + timeZone: stringParam, + }, + }, + }, + requiredAPIs: [], + specVersion: "v1alpha1", + extensions: { + ext1: { + params: {}, + localPath: "localPath", + events: [], + }, + }, + }; + const expected = { + endpoints: { + v2http: { + platform: "gcfv2", + entryPoint: "v2http", + labels: {}, + httpsTrigger: {}, + concurrency: "{{ params.foo }}", + maxInstances: "{{ params.foo == 24 ? -1 : 1 }}", + }, + v2schedule: { + platform: "gcfv2", + entryPoint: "v2callable", + labels: {}, + scheduleTrigger: { + schedule: '{{ params.bar == "America/Mexico_City" ? 
"mexico" : "usa" }}', + timeZone: "{{ params.bar }}", + }, + }, + }, + requiredAPIs: [], + specVersion: "v1alpha1", + extensions: { + ext1: { + params: {}, + localPath: "localPath", + events: [], + }, + }, + }; + expect(stackToWire(stack)).to.deep.equal(expected); + }); +}); + +describe("initTaskQueueTrigger", () => { + it("should init a taskQueueTrigger without preserveExternalChanges", () => { + const tt = initTaskQueueTrigger(); + + expect(tt).to.deep.eq({ + retryConfig: { + maxAttempts: RESET_VALUE, + maxDoublings: RESET_VALUE, + maxBackoffSeconds: RESET_VALUE, + maxRetrySeconds: RESET_VALUE, + minBackoffSeconds: RESET_VALUE, + }, + rateLimits: { + maxConcurrentDispatches: RESET_VALUE, + maxDispatchesPerSecond: RESET_VALUE, + }, + }); + }); + + it("should init a taskQueueTrigger with preserveExternalChanges", () => { + const opts: DeploymentOptions = { preserveExternalChanges: true }; + + const tt = initTaskQueueTrigger(opts); + + expect(tt).to.deep.eq({ + rateLimits: {}, + retryConfig: {}, + }); + }); +}); + +describe("initScheduleTrigger", () => { + it("should init a v1 scheduleTrigger without preserveExternalChanges", () => { + const st = initV1ScheduleTrigger("every 30 minutes"); + + expect(st).to.deep.eq({ + schedule: "every 30 minutes", + timeZone: RESET_VALUE, + retryConfig: { + retryCount: RESET_VALUE, + maxDoublings: RESET_VALUE, + maxRetryDuration: RESET_VALUE, + minBackoffDuration: RESET_VALUE, + maxBackoffDuration: RESET_VALUE, + }, + }); + }); + + it("should init a v1 scheduleTrigger with preserveExternalChanges", () => { + const opts: DeploymentOptions = { preserveExternalChanges: true }; + + const st = initV1ScheduleTrigger("every 30 minutes", opts); + + expect(st).to.deep.eq({ + schedule: "every 30 minutes", + retryConfig: {}, + }); + }); + + it("should init a v2 scheduleTrigger without preserveExternalChanges", () => { + const st = initV2ScheduleTrigger("every 30 minutes"); + + expect(st).to.deep.eq({ + schedule: "every 30 minutes", + timeZone: RESET_VALUE, + retryConfig: { + retryCount: RESET_VALUE, + maxDoublings: RESET_VALUE, + maxRetrySeconds: RESET_VALUE, + minBackoffSeconds: RESET_VALUE, + maxBackoffSeconds: RESET_VALUE, + }, + }); + }); + + it("should init a v2 scheduleTrigger with preserveExternalChanges", () => { + const opts: DeploymentOptions = { preserveExternalChanges: true }; + + const st = initV2ScheduleTrigger("every 30 minutes", opts); + + expect(st).to.deep.eq({ + schedule: "every 30 minutes", + retryConfig: {}, + }); + }); +}); diff --git a/spec/utils.spec.ts b/spec/utils.spec.ts deleted file mode 100644 index 1a2ad54c1..000000000 --- a/spec/utils.spec.ts +++ /dev/null @@ -1,80 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { normalizePath, pathParts, valAt, applyChange } from '../src/utils'; -import { expect } from 'chai'; - -describe ('utils', () => { - describe('.normalizePath(path: string)', () => { - it('should strip leading and trailing slash', () => { - expect(normalizePath('/my/path/is/{rad}/')).to.eq('my/path/is/{rad}'); - }); - }); - - describe('.pathParts(path: string): string[]', () => { - it('should turn a path into an array of strings', () => { - expect(pathParts('/foo/bar/baz')).to.deep.equal(['foo', 'bar', 'baz']); - }); - - it('should turn a root path, empty string, or null path into an empty array', () => { - expect(pathParts('')).to.deep.equal([]); - expect(pathParts(null)).to.deep.equal([]); - expect(pathParts('/')).to.deep.equal([]); - }); - }); - - describe('.valAt(source: any, path?: string): any', () => { - it('should be null if null along any point in the path', () => { - expect(valAt(null)).to.be.null; - expect(valAt(null, '/foo')).to.be.null; - expect(valAt({a: {b: null}}, '/a/b/c')).to.be.null; - }); - - it('should be null if accessing a path past a leaf value', () => { - expect(valAt({a: 2}, '/a/b')).to.be.null; - }); - - it('should be the leaf value if one is present', () => { - expect(valAt({a: {b: 23}}, '/a/b')).to.eq(23); - expect(valAt({a: {b: 23}}, '/a')).to.deep.equal({b: 23}); - }); - - it('should be undefined if in unexplored territory', () => { - expect(valAt({a: 23}, '/b')).to.be.undefined; - }); - }); - - describe('.applyChange(from: any, to: any): any', () => { - it('should return the to value for non-object values of from and to', () => { - expect(applyChange({a: 'b'}, null)).to.eq(null); - expect(applyChange(null, {a: 'b'})).to.deep.equal({a: 'b'}); - expect(applyChange(23, null)).to.be.null; - }); - - it('should return the merged value of two objects', () => { - let from = {a: {b: 'foo', c: 23, d: 444}, d: {e: 42}}; - let to: any = {a: {b: 'bar', c: null}, d: null, e: {f: 'g'}}; - let result = {a: {b: 'bar', d: 444}, e: {f: 'g'}}; - expect(applyChange(from, to)).to.deep.equal(result); - }); - }); -}); diff --git a/spec/v1/cloud-functions.spec.ts b/spec/v1/cloud-functions.spec.ts new file mode 100644 index 000000000..d85afbe2f --- /dev/null +++ b/spec/v1/cloud-functions.spec.ts @@ -0,0 +1,422 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; + +import { + onInit, + Event, + EventContext, + makeCloudFunction, + MakeCloudFunctionArgs, + RESET_VALUE, +} from "../../src/v1"; +import { MINIMAL_V1_ENDPOINT } from "../fixtures"; + +describe("makeCloudFunction", () => { + const cloudFunctionArgs: MakeCloudFunctionArgs = { + provider: "mock.provider", + eventType: "mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + legacyEventType: "providers/provider/eventTypes/event", + }; + + it("calls init function", async () => { + const test: Event = { + context: { + eventId: "00000", + timestamp: "2016-11-04T21:29:03.496Z", + eventType: "provider.event", + resource: { + service: "provider", + name: "resource", + }, + }, + data: "data", + }; + const cf = makeCloudFunction({ + provider: "mock.provider", + eventType: "mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + }); + + let hello; + onInit(() => (hello = "world")); + expect(hello).is.undefined; + await cf(test.data, test.context); + expect(hello).equals("world"); + }); + + it("should put a __trigger/__endpoint on the returned CloudFunction", () => { + const cf = makeCloudFunction({ + provider: "mock.provider", + eventType: "mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + }); + + expect(cf.__trigger).to.deep.equal({ + eventTrigger: { + eventType: "mock.provider.mock.event", + resource: "resource", + service: "service", + }, + }); + + expect(cf.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventType: "mock.provider.mock.event", + eventFilters: { + resource: "resource", + }, + retry: false, + }, + labels: {}, + }); + }); + + it("should have legacy event type in __trigger/__endpoint if provided", () => { + const cf = makeCloudFunction(cloudFunctionArgs); + + expect(cf.__trigger).to.deep.equal({ + eventTrigger: { + eventType: "providers/provider/eventTypes/event", + resource: "resource", + service: "service", + }, + }); + + expect(cf.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventType: "providers/provider/eventTypes/event", + eventFilters: { + resource: "resource", + }, + retry: false, + }, + labels: {}, + }); + }); + + it("should include converted options in __endpoint", () => { + const cf = makeCloudFunction({ + provider: "mock.provider", + eventType: "mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + options: { + timeoutSeconds: 10, + regions: ["us-central1"], + memory: "128MB", + serviceAccount: "foo@google.com", + secrets: ["MY_SECRET"], + }, + }); + + expect(cf.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + timeoutSeconds: 10, + region: ["us-central1"], + availableMemoryMb: 128, + serviceAccountEmail: "foo@google.com", + eventTrigger: { + eventType: "mock.provider.mock.event", + eventFilters: { + resource: "resource", + }, + retry: false, + }, + secretEnvironmentVariables: [{ key: "MY_SECRET" }], + labels: {}, + }); + }); + + it("should set retry given failure policy in __endpoint", () => { + const cf = makeCloudFunction({ + provider: "mock.provider", + eventType: 
"mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + options: { failurePolicy: { retry: {} } }, + }); + + expect(cf.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventType: "mock.provider.mock.event", + eventFilters: { + resource: "resource", + }, + retry: true, + }, + labels: {}, + }); + }); + + it("should setup a scheduleTrigger in __endpoint given a schedule", () => { + const schedule = { + schedule: "every 5 minutes", + retryConfig: { retryCount: 3 }, + timeZone: "America/New_York", + }; + const cf = makeCloudFunction({ + provider: "mock.provider", + eventType: "mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + options: { + schedule, + }, + }); + expect(cf.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + scheduleTrigger: { + ...schedule, + retryConfig: { + ...schedule.retryConfig, + maxBackoffDuration: RESET_VALUE, + maxDoublings: RESET_VALUE, + maxRetryDuration: RESET_VALUE, + minBackoffDuration: RESET_VALUE, + }, + }, + labels: {}, + }); + }); + + it("should setup a scheduleTrigger in __endpoint given a schedule and preserveExternalChanges", () => { + const schedule = { + schedule: "every 5 minutes", + retryConfig: { retryCount: 3 }, + timeZone: "America/New_York", + }; + const cf = makeCloudFunction({ + provider: "mock.provider", + eventType: "mock.event", + service: "service", + triggerResource: () => "resource", + handler: () => null, + options: { + schedule, + preserveExternalChanges: true, + }, + }); + expect(cf.__endpoint).to.deep.equal({ + platform: "gcfv1", + scheduleTrigger: { + ...schedule, + retryConfig: { + ...schedule.retryConfig, + }, + }, + labels: {}, + }); + }); + + it("should construct the right context for event", () => { + const args: any = { + ...cloudFunctionArgs, + handler: (data: any, context: EventContext) => context, + }; + const cf = makeCloudFunction(args); + const test: Event = { + context: { + eventId: "00000", + timestamp: "2016-11-04T21:29:03.496Z", + eventType: "provider.event", + resource: { + service: "provider", + name: "resource", + }, + }, + data: "data", + }; + + return expect(cf(test.data, test.context)).to.eventually.deep.equal({ + eventId: "00000", + timestamp: "2016-11-04T21:29:03.496Z", + eventType: "provider.event", + resource: { + service: "provider", + name: "resource", + }, + params: {}, + }); + }); + + it("should throw error when context.params accessed in handler environment", () => { + const args: any = { + ...cloudFunctionArgs, + handler: (data: any, context: EventContext) => context, + triggerResource: () => null, + }; + const cf = makeCloudFunction(args); + const test: Event = { + context: { + eventId: "00000", + timestamp: "2016-11-04T21:29:03.496Z", + eventType: "provider.event", + resource: { + service: "provider", + name: "resource", + }, + }, + data: "test data", + }; + + return cf(test.data, test.context).then((result) => { + expect(result).to.deep.equal({ + eventId: "00000", + timestamp: "2016-11-04T21:29:03.496Z", + eventType: "provider.event", + resource: { + service: "provider", + name: "resource", + }, + }); + expect(() => result.params).to.throw(Error); + }); + }); +}); + +describe("makeParams", () => { + const args: MakeCloudFunctionArgs = { + provider: "provider", + eventType: "event", + service: "service", + triggerResource: () => "projects/_/instances/pid/ref/{foo}/nested/{bar}", + handler: (data, context) => context.params, + legacyEventType: 
"legacyEvent", + }; + const cf = makeCloudFunction(args); + + it("should construct params from the event resource of events", () => { + const testEvent: Event = { + context: { + eventId: "111", + timestamp: "2016-11-04T21:29:03.496Z", + resource: { + service: "service", + name: "projects/_/instances/pid/ref/a/nested/b", + }, + eventType: "event", + }, + data: "data", + }; + + return expect(cf(testEvent.data, testEvent.context)).to.eventually.deep.equal({ + foo: "a", + bar: "b", + }); + }); +}); + +describe("makeAuth and makeAuthType", () => { + const args: MakeCloudFunctionArgs = { + provider: "google.firebase.database", + eventType: "event", + service: "service", + triggerResource: () => "projects/_/instances/pid/ref/{foo}/nested/{bar}", + handler: (data, context) => { + return { + auth: context.auth, + authType: context.authType, + }; + }, + }; + const cf = makeCloudFunction(args); + + it("should construct correct auth and authType for admin user", () => { + const testEvent = { + data: "data", + context: { + auth: { + admin: true, + }, + }, + }; + + return expect(cf(testEvent.data, testEvent.context)).to.eventually.deep.equal({ + auth: undefined, + authType: "ADMIN", + }); + }); + + it("should construct correct auth and authType for unauthenticated user", () => { + const testEvent = { + data: "data", + context: { + auth: { + admin: false, + }, + }, + }; + + return expect(cf(testEvent.data, testEvent.context)).to.eventually.deep.equal({ + auth: null, + authType: "UNAUTHENTICATED", + }); + }); + + it("should construct correct auth and authType for a user", () => { + const testEvent = { + data: "data", + context: { + auth: { + admin: false, + variable: { + uid: "user", + provider: "google", + token: { + sub: "user", + }, + }, + }, + }, + }; + + return expect(cf(testEvent.data, testEvent.context)).to.eventually.deep.equal({ + auth: { + uid: "user", + token: { + sub: "user", + }, + }, + authType: "USER", + }); + }); +}); diff --git a/spec/index.spec.ts b/spec/v1/config.spec.ts similarity index 65% rename from spec/index.spec.ts rename to spec/v1/config.spec.ts index 918ca80d1..15d2098ad 100644 --- a/spec/index.spec.ts +++ b/spec/v1/config.spec.ts @@ -20,24 +20,20 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. -import * as chai from 'chai'; -import * as chaiAsPromised from 'chai-as-promised'; -chai.use(chaiAsPromised); +import { expect } from "chai"; -import * as nock from 'nock'; -nock.disableNetConnect(); +import { config } from "../../src/v1/config"; -import 'mocha'; -import './utils.spec'; -import './apps.spec'; -import './cloud-functions.spec'; -import './config.spec'; -import './testing.spec'; -import './providers/analytics.spec'; -import './providers/auth.spec'; -import './providers/database.spec'; -import './providers/firestore.spec'; -import './providers/https.spec'; -import './providers/pubsub.spec'; -import './providers/storage.spec'; -import './providers/crashlytics.spec'; +describe("config()", () => { + it("throws an error with migration guidance", () => { + expect(() => { + // @ts-expect-error - config is deprecated and typed as never to cause a build error + config(); + }).to.throw( + Error, + "functions.config() has been removed in firebase-functions v7. " + + "Migrate to environment parameters using the params module. 
" + + "Migration guide: https://firebase.google.com/docs/functions/config-env#migrate-config" + ); + }); +}); diff --git a/spec/v1/function-builder.spec.ts b/spec/v1/function-builder.spec.ts new file mode 100644 index 000000000..955a0ab38 --- /dev/null +++ b/spec/v1/function-builder.spec.ts @@ -0,0 +1,595 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { clearParams, defineSecret } from "../../src/params"; + +import * as functions from "../../src/v1"; + +describe("FunctionBuilder", () => { + before(() => { + process.env.GCLOUD_PROJECT = "not-a-project"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should allow supported region to be set", () => { + const fn = functions + .region("us-east1") + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + }); + + it("should allow multiple supported regions to be set", () => { + const fn = functions + .region("us-east1", "us-central1") + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1", "us-central1"]); + expect(fn.__endpoint.region).to.deep.equal(["us-east1", "us-central1"]); + }); + + it("should allow all supported regions to be set", () => { + const fn = functions + .region( + "us-central1", + "us-east1", + "us-east4", + "europe-west1", + "europe-west2", + "europe-west3", + "asia-east2", + "asia-northeast1" + ) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.regions).to.deep.equal([ + "us-central1", + "us-east1", + "us-east4", + "europe-west1", + "europe-west2", + "europe-west3", + "asia-east2", + "asia-northeast1", + ]); + + expect(fn.__endpoint.region).to.deep.equal([ + "us-central1", + "us-east1", + "us-east4", + "europe-west1", + "europe-west2", + "europe-west3", + "asia-east2", + "asia-northeast1", + ]); + }); + + it("should allow valid runtime options to be set", () => { + const fn = functions + .runWith({ + timeoutSeconds: 90, + failurePolicy: { retry: {} }, + memory: "256MB", + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + expect(fn.__endpoint.eventTrigger.retry).to.deep.equal(true); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + 
expect(fn.__trigger.timeout).to.deep.equal("90s"); + }); + + it("should allow SecretParams in the secrets array and convert them", () => { + const sp = defineSecret("API_KEY"); + const fn = functions + .runWith({ + secrets: [sp], + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.secrets).to.deep.equal([ + { + name: "API_KEY", + }, + ]); + expect(fn.__endpoint.secretEnvironmentVariables).to.deep.equal([ + { + key: "API_KEY", + }, + ]); + + clearParams(); + }); + + it("should apply a default failure policy if it's aliased with `true`", () => { + const fn = functions + .runWith({ + failurePolicy: true, + memory: "256MB", + timeoutSeconds: 90, + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__endpoint.eventTrigger.retry).to.deep.equal(true); + }); + + it("should allow both supported region and valid runtime options to be set", () => { + const fn = functions + .region("europe-west2") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.regions).to.deep.equal(["europe-west2"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + expect(fn.__endpoint.region).to.deep.equal(["europe-west2"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("should allow both valid runtime options and supported region to be set in reverse order", () => { + const fn = functions + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .region("europe-west1") + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.regions).to.deep.equal(["europe-west1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + expect(fn.__endpoint.region).to.deep.equal(["europe-west1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("should fail if supported region but invalid runtime options are set (reverse order)", () => { + expect(() => { + functions.region("asia-northeast1").runWith({ timeoutSeconds: 600, memory: "256MB" }); + }).to.throw(Error, "TimeoutSeconds"); + }); + + it("should throw an error if user chooses a failurePolicy which is neither an object nor a boolean", () => { + expect(() => + functions.runWith({ + failurePolicy: 1234 as unknown as functions.RuntimeOptions["failurePolicy"], + }) + ).to.throw(Error, "failurePolicy must be a boolean or an object"); + }); + + it("should throw an error if user chooses a failurePolicy.retry which is not an object", () => { + expect(() => + functions.runWith({ + failurePolicy: { retry: 1234 as unknown as never }, + }) + ).to.throw(Error, "failurePolicy.retry"); + }); + + it("should throw an error if user chooses an invalid memory allocation", () => { + expect(() => { + return functions.runWith({ + memory: "unsupported", + } as any); + }).to.throw(Error, "memory"); + + expect(() => { + return functions.region("us-east1").runWith({ + memory: "unsupported", + } as any); + }).to.throw(Error, "memory"); + }); + + it("should throw an error if user chooses an invalid timeoutSeconds", () => { + expect(() => { + return functions.runWith({ + timeoutSeconds: 1000000, + } as any); + }).to.throw(Error, "TimeoutSeconds"); + + expect(() => { + return functions.region("asia-east2").runWith({ + timeoutSeconds: 1000000, + } as any); + }).to.throw(Error, "TimeoutSeconds"); + }); + + 
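+  // For example, functions.region("us-east1", "us-central1") is valid, but
+  // functions.region() throws because at least one region must be supplied,
+  // whether or not runWith() is chained afterwards.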
it("should throw an error if user chooses no region when using .region()", () => { + expect(() => { + return functions.region(); + }).to.throw(Error, "at least one region"); + + expect(() => { + return functions.region().runWith({ + timeoutSeconds: 500, + } as any); + }).to.throw(Error, "at least one region"); + }); + + it("should allow a ingressSettings to be set", () => { + const fn = functions + .runWith({ ingressSettings: "ALLOW_INTERNAL_ONLY" }) + .https.onRequest(() => undefined); + + expect(fn.__trigger.ingressSettings).to.equal("ALLOW_INTERNAL_ONLY"); + expect(fn.__endpoint.ingressSettings).to.equal("ALLOW_INTERNAL_ONLY"); + }); + + it("should throw an error if user chooses an invalid ingressSettings", () => { + expect(() => { + return functions.runWith({ + ingressSettings: "INVALID_OPTION", + } as any); + }).to.throw( + Error, + `The only valid ingressSettings values are: ${functions.INGRESS_SETTINGS_OPTIONS.join(",")}` + ); + }); + + it("should allow a vpcConnector to be set", () => { + const fn = functions + .runWith({ + vpcConnector: "test-connector", + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.vpcConnector).to.equal("test-connector"); + expect(fn.__endpoint.vpc).to.deep.equal({ connector: "test-connector" }); + }); + + it("should allow a vpcConnectorEgressSettings to be set", () => { + const fn = functions + .runWith({ + vpcConnector: "test-connector", + vpcConnectorEgressSettings: "PRIVATE_RANGES_ONLY", + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.vpcConnectorEgressSettings).to.equal("PRIVATE_RANGES_ONLY"); + expect(fn.__endpoint.vpc).to.deep.equal({ + connector: "test-connector", + egressSettings: "PRIVATE_RANGES_ONLY", + }); + }); + + it("should throw an error if user chooses an invalid vpcConnectorEgressSettings", () => { + expect(() => { + return functions.runWith({ + vpcConnector: "test-connector", + vpcConnectorEgressSettings: "INCORRECT_OPTION", + } as any); + }).to.throw( + Error, + `The only valid vpcConnectorEgressSettings values are: ${functions.VPC_EGRESS_SETTINGS_OPTIONS.join( + "," + )}` + ); + }); + + it("should allow a serviceAccount to be set as-is", () => { + const serviceAccount = "test-service-account@test.iam.gserviceaccount.com"; + const fn = functions + .runWith({ + serviceAccount, + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__endpoint.serviceAccountEmail).to.equal(serviceAccount); + expect(fn.__trigger.serviceAccountEmail).to.equal(serviceAccount); + }); + + it("should allow a serviceAccount to be set with generated service account email", () => { + const serviceAccount = "test-service-account@"; + const projectId = process.env.GCLOUD_PROJECT; + const fn = functions + .runWith({ + serviceAccount, + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.serviceAccountEmail).to.equal( + `test-service-account@${projectId}.iam.gserviceaccount.com` + ); + expect(fn.__endpoint.serviceAccountEmail).to.equal(`test-service-account@`); + }); + + it("should set a null serviceAccountEmail if service account is set to `default`", () => { + const serviceAccount = "default"; + const fn = functions + .runWith({ + serviceAccount, + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.serviceAccountEmail).to.be.null; + expect(fn.__endpoint.serviceAccountEmail).to.equal(serviceAccount); + }); + + it("should throw an error if serviceAccount is set to an invalid value", () => { + const serviceAccount = "test-service-account"; + expect(() => { + functions.runWith({ 
+ serviceAccount, + }); + }).to.throw(); + }); + + it("should allow setting 4GB memory option", () => { + const fn = functions + .runWith({ + memory: "4GB", + }) + .region("europe-west1") + .auth.user() + .onCreate((user) => user); + + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(4096); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(4096); + }); + + it("should allow labels to be set", () => { + const fn = functions + .runWith({ + labels: { + "valid-key": "valid-value", + }, + }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.labels).to.deep.equal({ + "valid-key": "valid-value", + }); + expect(fn.__endpoint.labels).to.deep.equal({ + "valid-key": "valid-value", + }); + }); + + it("should throw an error if more than 58 labels are set", () => { + const labels = {}; + for (let i = 0; i < 59; i++) { + labels[`label${i}`] = "value"; + } + + expect(() => + functions.runWith({ + labels, + }) + ).to.throw(); + }); + + it("should throw an error if labels has a key that is too long", () => { + expect(() => + functions.runWith({ + labels: { + "a-very-long-key-that-is-more-than-the-maximum-allowed-length-for-keys": "value", + }, + }) + ).to.throw(); + }); + + it("should throw an error if labels has key that is too short", () => { + expect(() => + functions.runWith({ + labels: { "": "value" }, + }) + ).to.throw(); + }); + + it("should throw an error if labels has a value that is too long", () => { + expect(() => + functions.runWith({ + labels: { + key: "a-very-long-value-that-is-more-than-the-maximum-allowed-length-for-values", + }, + }) + ).to.throw(); + }); + + it("should throw an error if labels has a key that contains invalid characters", () => { + expect(() => + functions.runWith({ + labels: { + Key: "value", + }, + }) + ).to.throw(); + + expect(() => + functions.runWith({ + labels: { + "key ": "value", + }, + }) + ).to.throw(); + + expect(() => + functions.runWith({ + labels: { + "1key": "value", + }, + }) + ).to.throw(); + }); + + it("should throw an error if labels has a value that contains invalid characters", () => { + expect(() => + functions.runWith({ + labels: { + key: "Value", + }, + }) + ).to.throw(); + + expect(() => + functions.runWith({ + labels: { + "key ": "va lue", + }, + }) + ).to.throw(); + }); + + it("should throw an error if a label key starts with a reserved namespace", () => { + expect(() => + functions.runWith({ + labels: { + "firebase-foo": "value", + }, + }) + ).to.throw(); + + expect(() => + functions.runWith({ + labels: { + "deployment-bar": "value", + }, + }) + ).to.throw(); + }); + + it("should throw an error if invoker is an empty string", () => { + expect(() => + functions.runWith({ + invoker: "", + }) + ).to.throw(); + }); + + it("should throw an error if invoker is an empty array", () => { + expect(() => + functions.runWith({ + invoker: [""], + }) + ).to.throw(); + }); + + it("should throw an error if invoker has an empty string", () => { + expect(() => + functions.runWith({ + invoker: ["service-account1", "", "service-account2"], + }) + ).to.throw(); + }); + + it("should throw an error if public identifier is in the invoker array", () => { + expect(() => + functions.runWith({ + invoker: ["service-account1", "public", "service-account2"], + }) + ).to.throw(); + }); + + it("should throw an error if private identifier is in the invoker array", () => { + expect(() => + functions.runWith({ + invoker: ["service-account1", "private", "service-account2"], + }) + ).to.throw(); + }); + + it("should allow valid secret config expressed 
using short form", () => { + const secrets = ["API_KEY"]; + const fn = functions + .runWith({ secrets }) + .auth.user() + .onCreate((user) => user); + + expect(fn.__trigger.secrets).to.deep.equal(secrets); + expect(fn.__endpoint.secretEnvironmentVariables).to.deep.equal([{ key: secrets[0] }]); + }); + + it("should throw error given secrets expressed with full resource name", () => { + expect(() => + functions.runWith({ + secrets: ["projects/my-project/secrets/API_KEY"], + }) + ).to.throw(); + }); + + it("should throw error given invalid secret config", () => { + const sp = defineSecret("projects/my-project/secrets/API_KEY"); + expect(() => + functions.runWith({ + secrets: [sp], + }) + ).to.throw(); + clearParams(); + }); + + it("should throw error given invalid secret config", () => { + const sp = defineSecret("ABC/efg"); + + expect(() => + functions.runWith({ + secrets: ["ABC/efg"], + }) + ).to.throw(); + + expect(() => + functions.runWith({ + secrets: [sp], + }) + ).to.throw(); + clearParams(); + }); + + it("should throw error given invalid secret with versions", () => { + const sp = defineSecret("ABC@3"); + + expect(() => + functions.runWith({ + secrets: ["ABC@3"], + }) + ).to.throw(); + + expect(() => + functions.runWith({ + secrets: [sp], + }) + ).to.throw(); + clearParams(); + }); +}); diff --git a/spec/providers/analytics.spec.input.ts b/spec/v1/providers/analytics.spec.input.ts similarity index 73% rename from spec/providers/analytics.spec.input.ts rename to spec/v1/providers/analytics.spec.input.ts index 84ac4c3ce..9edb62a4e 100644 --- a/spec/providers/analytics.spec.input.ts +++ b/spec/v1/providers/analytics.spec.input.ts @@ -21,7 +21,7 @@ // SOFTWARE. /* tslint:disable:max-line-length */ -import { AnalyticsEvent } from '../../src/providers/analytics'; +import { AnalyticsEvent } from "../../../src/v1/providers/analytics"; // A payload, as it might arrive over the wire. Every possible field is filled out at least once. export const fullPayload = JSON.parse(`{ @@ -117,70 +117,75 @@ export const fullPayload = JSON.parse(`{ } } }, - "eventId": "1486080145623867projects/analytics-integration-fd82a/events/i_made_this_upproviders/google.firebase.analytics/eventTypes/event.sendprojects/f949d1bb9ef782579-tp/topics/cloud-functions-u54ejabpzs4prfjh7433eklhae", - "eventType": "providers/google.firebase.analytics/eventTypes/event.send", - "resource": "projects/analytics-integration-fd82a/events/i_made_this_up", - "timestamp": "2017-03-29T23:59:59.986371388Z" + "context": { + "eventId": "1486080145623867projects/analytics-integration-fd82a/events/i_made_this_upproviders/google.firebase.analytics/eventTypes/event.sendprojects/f949d1bb9ef782579-tp/topics/cloud-functions-u54ejabpzs4prfjh7433eklhae", + "eventType": "providers/google.firebase.analytics/eventTypes/event.send", + "timestamp": "2017-03-29T23:59:59.986371388Z", + "resource": { + "service": "app-measurement.com", + "name": "projects/analytics-integration-fd82a/events/i_made_this_up" + } + } }`); // The event data that we expect would be constructed if the payload above were to arrive. 
export const data: AnalyticsEvent = { - reportingDate: '20170202', - name: 'Loaded_In_Background', + reportingDate: "20170202", + name: "Loaded_In_Background", params: { - build: '1350', + build: "1350", calls_remaining: 10, fraction_calls_dropped: 0.0123456, average_call_rating: 4.5, }, - logTime: '2017-02-02T23:06:26.124Z', - previousLogTime: '2017-02-02T23:01:19.797Z', + logTime: "2017-02-02T23:06:26.124Z", + previousLogTime: "2017-02-02T23:01:19.797Z", valueInUSD: 1234.5, user: { - userId: 'abcdefghijklmnop!', + userId: "abcdefghijklmnop!", appInfo: { - appId: 'com.mobileday.MobileDay', - appInstanceId: 'E3C9939401814B9B954725A740B8C7BC', - appPlatform: 'IOS', - appStore: 'iTunes', - appVersion: '5.2.0', + appId: "com.mobileday.MobileDay", + appInstanceId: "E3C9939401814B9B954725A740B8C7BC", + appPlatform: "IOS", + appStore: "iTunes", + appVersion: "5.2.0", }, bundleInfo: { bundleSequenceId: 6034, serverTimestampOffset: 371, }, deviceInfo: { - deviceCategory: 'mobile', - deviceModel: 'iPhone7,2', + deviceCategory: "mobile", + deviceModel: "iPhone7,2", deviceTimeZoneOffsetSeconds: -21600, - mobileBrandName: 'Apple', - mobileMarketingName: 'iPhone 6', - mobileModelName: 'iPhone 6', - platformVersion: '10.2.1', - userDefaultLanguage: 'en-us', - deviceId: '599F9C00-92DC-4B5C-9464-7971F01F8370', - resettableDeviceId: '599F9C00-92DC-4B5C-9464-7971F01F8370', + mobileBrandName: "Apple", + mobileMarketingName: "iPhone 6", + mobileModelName: "iPhone 6", + platformVersion: "10.2.1", + userDefaultLanguage: "en-us", + deviceId: "599F9C00-92DC-4B5C-9464-7971F01F8370", + resettableDeviceId: "599F9C00-92DC-4B5C-9464-7971F01F8370", limitedAdTracking: true, }, - firstOpenTime: '2016-04-28T15:00:35.819Z', + firstOpenTime: "2016-04-28T15:00:35.819Z", geoInfo: { - city: 'Plano', - continent: '021', - country: 'United States', - region: 'Texas', + city: "Plano", + continent: "021", + country: "United States", + region: "Texas", }, userProperties: { build: { - setTime: '2017-02-02T23:06:26.090Z', - value: '1350', + setTime: "2017-02-02T23:06:26.090Z", + value: "1350", }, calls_remaining: { - setTime: '2017-02-02T23:06:26.094Z', - value: '10', + setTime: "2017-02-02T23:06:26.094Z", + value: "10", }, version: { - setTime: '2017-02-02T23:06:26.085Z', - value: '5.2.0', + setTime: "2017-02-02T23:06:26.085Z", + value: "5.2.0", }, }, }, diff --git a/spec/v1/providers/analytics.spec.ts b/spec/v1/providers/analytics.spec.ts new file mode 100644 index 000000000..2cc1d8ecd --- /dev/null +++ b/spec/v1/providers/analytics.spec.ts @@ -0,0 +1,416 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; + +import * as functions from "../../../src/v1"; +import { Event } from "../../../src/v1/cloud-functions"; +import * as analytics from "../../../src/v1/providers/analytics"; +import * as analyticsSpecInput from "./analytics.spec.input"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("Analytics Functions", () => { + describe("EventBuilder", () => { + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .analytics.event("event") + .onLog((event) => event); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + describe("#onLog", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const cloudFunction = analytics.event("first_open").onLog(() => null); + + expect(cloudFunction.__trigger).to.deep.equal({ + eventTrigger: { + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + resource: "projects/project1/events/first_open", + service: "app-measurement.com", + }, + }); + + expect(cloudFunction.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventFilters: { + resource: "projects/project1/events/first_open", + }, + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + retry: false, + }, + labels: {}, + }); + }); + }); + + describe("#dataConstructor", () => { + it("should handle an event with the appropriate fields", async () => { + const cloudFunction = analytics + .event("first_open") + .onLog((data: analytics.AnalyticsEvent) => data); + + // The event data delivered over the wire will be the JSON for an AnalyticsEvent: + // https://firebase.google.com/docs/auth/admin/manage-users#retrieve_user_data + const event: Event = { + data: { + userDim: { + userId: "hi!", + }, + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + resource: { + service: "app-measurement.com", + name: "projects/project1/events/first_open", + }, + }, + }; + + await expect(cloudFunction(event.data, event.context)).to.eventually.deep.equal({ + params: {}, + user: { + userId: "hi!", + userProperties: {}, + }, + }); + }); + + it("should remove xValues", () => { + const cloudFunction = analytics + .event("first_open") + .onLog((data: analytics.AnalyticsEvent) => data); + + // Incoming events will have four kinds of "xValue" fields: "intValue", + // "stringValue", "doubleValue" and "floatValue". We expect those to get + // flattened away, leaving just their values. 
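+        // For example, { intValue: "10" } becomes 10 and { stringValue: "1350" }
+        // becomes "1350" in the resulting AnalyticsEvent params.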
+ const event: Event = { + data: { + eventDim: [ + { + date: "20170202", + name: "Loaded_In_Background", + params: { + build: { + stringValue: "1350", + }, + calls_remaining: { + intValue: "10", + }, + goats_teleported: { + doubleValue: 1.1, + }, + boat_boyancy: { + floatValue: 133.7, + }, + }, + }, + ], + userDim: { + userProperties: { + foo: { + value: { + stringValue: "bar", + }, + }, + }, + }, + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + resource: { + service: "app-measurement.com", + name: "projects/project1/events/first_open", + }, + }, + }; + + return expect(cloudFunction(event.data, event.context)).to.eventually.deep.equal({ + reportingDate: "20170202", + name: "Loaded_In_Background", + params: { + build: "1350", + calls_remaining: 10, + goats_teleported: 1.1, + boat_boyancy: 133.7, + }, + user: { + userProperties: { + foo: { + value: "bar", + }, + }, + }, + }); + }); + + it("should change microsecond timestamps to ISO strings, and offsets to millis", () => { + const cloudFunction = analytics + .event("first_open") + .onLog((data: analytics.AnalyticsEvent) => data); + + const event: Event = { + data: { + eventDim: [ + { + date: "20170202", + name: "Loaded_In_Background", + timestampMicros: "1489080600000000", + previousTimestampMicros: "526657020000000", + }, + ], + userDim: { + firstOpenTimestampMicros: "577978620000000", + userProperties: { + foo: { + value: { + stringValue: "bar", + }, + setTimestampUsec: "514820220000000", + }, + }, + bundleInfo: { + serverTimestampOffsetMicros: 9876789, + }, + }, + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + resource: { + service: "app-measurement.com", + name: "projects/project1/events/first_open", + }, + }, + }; + + return expect(cloudFunction(event.data, event.context)).to.eventually.deep.equal({ + reportingDate: "20170202", + name: "Loaded_In_Background", + params: {}, + logTime: "2017-03-09T17:30:00.000Z", + previousLogTime: "1986-09-09T13:37:00.000Z", + user: { + firstOpenTime: "1988-04-25T13:37:00.000Z", + userProperties: { + foo: { + value: "bar", + setTime: "1986-04-25T13:37:00.000Z", + }, + }, + bundleInfo: { + serverTimestampOffset: 9877, + }, + }, + }); + }); + + it("should populate currency fields", () => { + const cloudFunction = analytics + .event("first_open") + .onLog((data: analytics.AnalyticsEvent) => data); + + // Incoming events will have four kinds of "xValue" fields: "intValue", + // "stringValue", "doubleValue" and "floatValue". We expect those to get + // flattened away, leaving just their values. + // + // xValues in eventDim[...].params should also populate a 'rawValue' field + // that always contains a string. + // + // Separately, the input has a number of microsecond timestamps that we'd + // like to rename and scale down to milliseconds. 
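+        // This case exercises the currency field specifically: the wire format's
+        // valueInUsd is surfaced as valueInUSD on the AnalyticsEvent.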
+ const event: Event = { + data: { + eventDim: [ + { + date: "20170202", + name: "Loaded_In_Background", + valueInUsd: 123.4, + }, + ], + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + resource: { + service: "app-measurement.com", + name: "projects/project1/events/first_open", + }, + }, + }; + + return expect(cloudFunction(event.data, event.context)).to.eventually.deep.equal({ + reportingDate: "20170202", + name: "Loaded_In_Background", + params: {}, + valueInUSD: 123.4, // Field renamed Usd -> USD. + }); + }); + + it("should recognize all the fields the payload can contain", () => { + const cloudFunction = analytics + .event("first_open") + .onLog((data: analytics.AnalyticsEvent) => data); + // The payload in analytics_spec_input contains all possible fields at least once. + const payloadData = analyticsSpecInput.fullPayload.data; + const payloadContext = analyticsSpecInput.fullPayload.context; + + return expect(cloudFunction(payloadData, payloadContext)).to.eventually.deep.equal( + analyticsSpecInput.data + ); + }); + + it("should handle null and missing user property values without throwing", () => { + const cloudFunction = analytics + .event("app_remove") + .onLog((data: analytics.AnalyticsEvent) => data); + + const event: Event = { + data: { + eventDim: [ + { + name: "app_remove", + params: {}, + date: "20240114", + timestampMicros: "1705257600000000", + }, + ], + userDim: { + userProperties: { + // Invalid properties that should be filtered out: + null_property: null, + value_null: { + value: null, + }, + value_undefined: { + value: undefined, + }, + empty_object: {}, + value_empty_object: { + value: {}, + }, + // Valid properties that should be kept: + valid_string: { + value: { + stringValue: "test", + }, + setTimestampUsec: "1486076786090987", + }, + valid_empty_string: { + value: { + stringValue: "", + }, + setTimestampUsec: "1486076786090987", + }, + valid_zero: { + value: { + intValue: "0", + }, + setTimestampUsec: "1486076786090987", + }, + }, + }, + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "providers/google.firebase.analytics/eventTypes/event.log", + resource: { + service: "app-measurement.com", + name: "projects/project1/events/app_remove", + }, + }, + }; + + return expect(cloudFunction(event.data, event.context)).to.eventually.deep.equal({ + reportingDate: "20240114", + name: "app_remove", + params: {}, + logTime: "2024-01-14T18:40:00.000Z", + user: { + userProperties: { + valid_string: { + value: "test", + setTime: "2017-02-02T23:06:26.090Z", + }, + valid_empty_string: { + value: "", + setTime: "2017-02-02T23:06:26.090Z", + }, + valid_zero: { + value: "0", + setTime: "2017-02-02T23:06:26.090Z", + }, + }, + }, + }); + }); + }); + }); + + describe("process.env.GCLOUD_PROJECT not set", () => { + it("should not throw if __trigger is not accessed", () => { + expect(() => analytics.event("event").onLog(() => null)).to.not.throw(Error); + }); + + it("should throw when __endpoint is accessed", () => { + expect(() => analytics.event("event").onLog(() => null).__endpoint).to.throw(Error); + }); + + it("should throw when trigger is accessed", () => { + expect(() => analytics.event("event").onLog(() => null).__trigger).to.throw(Error); + }); + + it("should not throw when #run is called", () => { + const cf = analytics.event("event").onLog(() => null); + + expect(cf.run).to.not.throw(Error); + }); + }); +}); diff 
--git a/spec/v1/providers/auth.spec.ts b/spec/v1/providers/auth.spec.ts new file mode 100644 index 000000000..ec1a793f5 --- /dev/null +++ b/spec/v1/providers/auth.spec.ts @@ -0,0 +1,522 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { UserRecord } from "../../../src/common/providers/identity"; +import * as functions from "../../../src/v1"; +import { CloudFunction, Event } from "../../../src/v1/cloud-functions"; +import * as auth from "../../../src/v1/providers/auth"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("Auth Functions", () => { + const event: Event = { + data: { + metadata: { + creationTime: "2016-12-15T19:37:37.059Z", + lastSignInTime: "2017-01-01T00:00:00.000Z", + }, + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "providers/firebase.auth/eventTypes/user.delete", + resource: { + service: "firebaseauth.googleapis.com", + name: "projects/project1", + }, + }, + }; + + describe("AuthBuilder", () => { + function expectedTrigger(project: string, eventType: string) { + return { + eventTrigger: { + resource: `projects/${project}`, + eventType: `providers/firebase.auth/eventTypes/${eventType}`, + service: "firebaseauth.googleapis.com", + }, + }; + } + + function expectedEndpoint(project: string, eventType: string) { + return { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventFilters: { + resource: `projects/${project}`, + }, + eventType: `providers/firebase.auth/eventTypes/${eventType}`, + retry: false, + }, + labels: {}, + }; + } + + const handler = () => { + return Promise.resolve(); + }; + + const project = "project1"; + + before(() => { + process.env.GCLOUD_PROJECT = project; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .auth.user() + .onCreate(() => null); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + describe("#onCreate", () => { + it("should 
return a trigger/endpoint with appropriate values", () => { + const cloudFunction = auth.user().onCreate(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(project, "user.create")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(project, "user.create")); + }); + }); + + describe("#onDelete", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const cloudFunction = auth.user().onDelete(handler); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(project, "user.delete")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(project, "user.delete")); + }); + }); + + describe("beforeCreate", () => { + it("should create the function without options", () => { + const fn = auth.user().beforeCreate(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeCreate", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeCreate", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should create the function with options", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .auth.user({ + blockingOptions: { + accessToken: true, + refreshToken: false, + }, + }) + .beforeCreate(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + regions: ["us-east1"], + availableMemoryMb: 256, + timeout: "90s", + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeCreate", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + region: ["us-east1"], + availableMemoryMb: 256, + timeoutSeconds: 90, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeCreate", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("beforeSignIn", () => { + it("should create the function without options", () => { + const fn = auth.user().beforeSignIn(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSignIn", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSignIn", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should create the function with options", () => { + const fn = functions + .region("us-east1") + .runWith({ 
+ timeoutSeconds: 90, + memory: "256MB", + }) + .auth.user({ + blockingOptions: { + accessToken: true, + refreshToken: false, + }, + }) + .beforeSignIn(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + regions: ["us-east1"], + availableMemoryMb: 256, + timeout: "90s", + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSignIn", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + region: ["us-east1"], + availableMemoryMb: 256, + timeoutSeconds: 90, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSignIn", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("beforeEmail", () => { + it("should create function without options", () => { + const fn = auth.user().beforeEmail(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should create the function with options", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .auth.user({ + blockingOptions: { + accessToken: true, + refreshToken: false, + }, + }) + .beforeEmail(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + regions: ["us-east1"], + availableMemoryMb: 256, + timeout: "90s", + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + region: ["us-east1"], + availableMemoryMb: 256, + timeoutSeconds: 90, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("beforeSms", () => { + it("should create function without options", () => { + const fn = auth.user().beforeSms(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendSms", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendSms", + options: { + accessToken: false, + idToken: 
false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should create the function with options", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .auth.user({ + blockingOptions: { + accessToken: true, + refreshToken: false, + }, + }) + .beforeSms(() => Promise.resolve()); + + expect(fn.__trigger).to.deep.equal({ + labels: {}, + regions: ["us-east1"], + availableMemoryMb: 256, + timeout: "90s", + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendSms", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + labels: {}, + region: ["us-east1"], + availableMemoryMb: 256, + timeoutSeconds: 90, + blockingTrigger: { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendSms", + options: { + accessToken: true, + idToken: false, + refreshToken: false, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("#_dataConstructor", () => { + let cloudFunctionDelete: CloudFunction<UserRecord>; + + before(() => { + cloudFunctionDelete = auth.user().onDelete((data: UserRecord) => data); + }); + + it("should handle wire format as of v5.0.0 of firebase-admin", () => { + return cloudFunctionDelete(event.data, event.context).then((data: any) => { + expect(data.metadata.creationTime).to.equal("2016-12-15T19:37:37.059Z"); + expect(data.metadata.lastSignInTime).to.equal("2017-01-01T00:00:00.000Z"); + }); + }); + }); + }); + + describe("process.env.GCLOUD_PROJECT not set", () => { + it("should not throw if __trigger is not accessed", () => { + expect(() => auth.user().onCreate(() => null)).to.not.throw(Error); + }); + + it("should throw when trigger is accessed", () => { + expect(() => auth.user().onCreate(() => null).__trigger).to.throw(Error); + }); + + it("should throw when endpoint is accessed", () => { + expect(() => auth.user().onCreate(() => null).__endpoint).to.throw(Error); + }); + + it("should not throw when #run is called", () => { + const cf = auth.user().onCreate(() => null); + expect(cf.run).to.not.throw(Error); + }); + }); +}); diff --git a/spec/v1/providers/database.spec.ts b/spec/v1/providers/database.spec.ts new file mode 100644 index 000000000..c189ca4eb --- /dev/null +++ b/spec/v1/providers/database.spec.ts @@ -0,0 +1,794 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { getApp, setApp } from "../../../src/common/app"; +import * as config from "../../../src/common/config"; +import { applyChange } from "../../../src/common/utilities/utils"; +import * as functions from "../../../src/v1"; +import * as database from "../../../src/v1/providers/database"; +import { expectType } from "../../common/metaprogramming"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("Database Functions", () => { + describe("DatabaseBuilder", () => { + // TODO add tests for building a data or change based on the type of operation + + function expectedTrigger(resource: string, eventType: string) { + return { + eventTrigger: { + resource, + eventType: `providers/google.firebase.database/eventTypes/${eventType}`, + service: "firebaseio.com", + }, + }; + } + + function expectedEndpoint(resource: string, eventType: string) { + return { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventFilters: { + resource, + }, + eventType: `providers/google.firebase.database/eventTypes/${eventType}`, + retry: false, + }, + labels: {}, + }; + } + + before(() => { + config.resetCache({ + databaseURL: "https://subdomain.apse.firebasedatabase.app", + }); + }); + + after(() => { + config.resetCache(undefined); + setApp(undefined); + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .database.ref("/") + .onCreate((snap) => snap); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + describe("#onWrite()", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const func = database.ref("foo").onWrite(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/subdomain/refs/foo", "ref.write") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/subdomain/refs/foo", "ref.write") + ); + }); + + it("should let developers choose a database instance", () => { + const func = database + .instance("custom") + .ref("foo") + .onWrite(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/custom/refs/foo", "ref.write") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/custom/refs/foo", "ref.write") + ); + }); + + it("should return a handler that emits events with a proper DataSnapshot", () => { + const event = { + data: { + data: null, + delta: { foo: "bar" }, + }, + context: { + eventId: "70172329041928", + eventType: "providers/google.firebase.database/eventTypes/ref.write", + timestamp: "2018-04-09T07:56:12.975Z", + resource: "projects/_/instances/subdomains/refs/users", + }, + }; + const handler = database.ref("/users/{id}").onWrite((change) => { + expect(change.after.val()).to.deep.equal({ foo: "bar" }); + }); + + return handler(event.data, 
event.context); + }); + + it("Should have params of the correct type", () => { + database.ref("foo").onWrite((event, context) => { + expectType<Record<string, never>>(context.params); + }); + database.ref("foo/{bar}").onWrite((event, context) => { + expectType<{ bar: string }>(context.params); + }); + database.ref("foo/{bar}/{baz}").onWrite((event, context) => { + expectType<{ bar: string; baz: string }>(context.params); + }); + }); + }); + + describe("#onCreate()", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const func = database.ref("foo").onCreate(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/subdomain/refs/foo", "ref.create") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/subdomain/refs/foo", "ref.create") + ); + }); + + it("should let developers choose a database instance", () => { + const func = database + .instance("custom") + .ref("foo") + .onCreate(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/custom/refs/foo", "ref.create") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/custom/refs/foo", "ref.create") + ); + }); + + it("should return a handler that emits events with a proper DataSnapshot", () => { + const event = { + data: { + data: null, + delta: { foo: "bar" }, + }, + context: { + eventId: "70172329041928", + eventType: "providers/google.firebase.database/eventTypes/ref.create", + timestamp: "2018-04-09T07:56:12.975Z", + resource: "projects/_/instances/subdomains/refs/users", + }, + }; + + const handler = database.ref("/users/{id}").onCreate((data) => { + expect(data.val()).to.deep.equal({ foo: "bar" }); + }); + + return handler(event.data, event.context); + }); + + it("Should have params of the correct type", () => { + database.ref("foo").onCreate((event, context) => { + expectType<Record<string, never>>(context.params); + }); + database.ref("foo/{bar}").onCreate((event, context) => { + expectType<{ bar: string }>(context.params); + }); + database.ref("foo/{bar}/{baz}").onCreate((event, context) => { + expectType<{ bar: string; baz: string }>(context.params); + }); + }); + }); + + describe("#onUpdate()", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const func = database.ref("foo").onUpdate(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/subdomain/refs/foo", "ref.update") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/subdomain/refs/foo", "ref.update") + ); + }); + + it("should let developers choose a database instance", () => { + const func = database + .instance("custom") + .ref("foo") + .onUpdate(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/custom/refs/foo", "ref.update") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/custom/refs/foo", "ref.update") + ); + }); + + it("should return a handler that emits events with a proper DataSnapshot", () => { + const event = { + data: { + data: null, + delta: { foo: "bar" }, + }, + context: { + eventId: "70172329041928", + eventType: "providers/google.firebase.database/eventTypes/ref.update", + timestamp: "2018-04-09T07:56:12.975Z", + resource: "projects/_/instances/subdomains/refs/users", + }, + }; + + const handler = database.ref("/users/{id}").onUpdate((change) => { + expect(change.after.val()).to.deep.equal({ foo: "bar" }); + }); + + return
handler(event.data, event.context); + }); + + it("Should have params of the correct type", () => { + database.ref("foo").onUpdate((event, context) => { + expectType<Record<string, never>>(context.params); + }); + database.ref("foo/{bar}").onUpdate((event, context) => { + expectType<{ bar: string }>(context.params); + }); + database.ref("foo/{bar}/{baz}").onUpdate((event, context) => { + expectType<{ bar: string; baz: string }>(context.params); + }); + }); + }); + + describe("#onDelete()", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const func = database.ref("foo").onDelete(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/subdomain/refs/foo", "ref.delete") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/subdomain/refs/foo", "ref.delete") + ); + }); + + it("should let developers choose a database instance", () => { + const func = database + .instance("custom") + .ref("foo") + .onDelete(() => null); + + expect(func.__trigger).to.deep.equal( + expectedTrigger("projects/_/instances/custom/refs/foo", "ref.delete") + ); + + expect(func.__endpoint).to.deep.equal( + expectedEndpoint("projects/_/instances/custom/refs/foo", "ref.delete") + ); + }); + + it("should return a handler that emits events with a proper DataSnapshot", () => { + const event = { + data: { + data: { foo: "bar" }, + delta: null, + }, + context: { + eventId: "70172329041928", + eventType: "providers/google.firebase.database/eventTypes/ref.delete", + timestamp: "2018-04-09T07:56:12.975Z", + resource: "projects/_/instances/subdomains/refs/users", + }, + }; + + const handler = database.ref("/users/{id}").onDelete((data) => { + expect(data.val()).to.deep.equal({ foo: "bar" }); + }); + + return handler(event.data, event.context); + }); + + it("Should have params of the correct type", () => { + database.ref("foo").onDelete((event, context) => { + expectType<Record<string, never>>(context.params); + }); + database.ref("foo/{bar}").onDelete((event, context) => { + expectType<{ bar: string }>(context.params); + }); + database.ref("foo/{bar}/{baz}").onDelete((event, context) => { + expectType<{ bar: string; baz: string }>(context.params); + }); + }); + }); + }); + + describe("process.env.FIREBASE_CONFIG not set", () => { + it("should not throw if __trigger is not accessed", () => { + expect(() => database.ref("/path").onWrite(() => null)).to.not.throw(Error); + }); + }); + + it("should throw when trigger is accessed", () => { + expect(() => database.ref("/path").onWrite(() => null).__trigger).to.throw(Error); + }); + + it("should throw when endpoint is accessed", () => { + expect(() => database.ref("/path").onWrite(() => null).__endpoint).to.throw(Error); + }); + + it("should not throw when #run is called", () => { + const cf = database.ref("/path").onWrite(() => null); + expect(cf.run).to.not.throw(Error); + }); + + describe("extractInstanceAndPath", () => { + it("should return correct us-central prod instance and path strings if domain is missing", () => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/bar", + undefined + ); + expect(instance).to.equal("https://foo.firebaseio.com"); + expect(path).to.equal("/bar"); + }); + + it("should return the correct staging instance and path strings if domain is present", () => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/bar", + "firebaseio-staging.com" + ); + expect(instance).to.equal("https://foo.firebaseio-staging.com");
+ expect(path).to.equal("/bar"); + }); + + it("should return the correct instance and path strings if root path is /refs", () => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/refs" + ); + expect(instance).to.equal("https://foo.firebaseio.com"); + expect(path).to.equal("/refs"); + }); + + it("should return the correct instance and path strings if a child path contain /refs", () => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/root/refs" + ); + expect(instance).to.equal("https://foo.firebaseio.com"); + expect(path).to.equal("/root/refs"); + }); + + it("should return the correct multi-region instance and path strings if domain is present", () => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/bar", + "euw1.firebasedatabase.app" + ); + expect(instance).to.equal("https://foo.euw1.firebasedatabase.app"); + expect(path).to.equal("/bar"); + }); + + it("should throw an error if the given instance name contains anything except alphanumerics and dashes", () => { + expect(() => { + return database.extractInstanceAndPath( + "projects/_/instances/a.bad.name/refs/bar", + undefined + ); + }).to.throw(Error); + expect(() => { + return database.extractInstanceAndPath( + "projects/_/instances/a_different_bad_name/refs/bar", + undefined + ); + }).to.throw(Error); + expect(() => { + return database.extractInstanceAndPath("projects/_/instances/BAD!!!!/refs/bar", undefined); + }).to.throw(Error); + }); + + it("should use the emulator host when present", () => { + process.env.FIREBASE_DATABASE_EMULATOR_HOST = "localhost:1234"; + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/bar", + "firebaseio-staging.com" + ); + expect(instance).to.equal("http://localhost:1234/?ns=foo"); + expect(path).to.equal("/bar"); + delete process.env.FIREBASE_DATABASE_EMULATOR_HOST; + }); + }); +}); + +describe("DataSnapshot", () => { + let subject: any; + + const populate = (data: any) => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/other-subdomain/refs/foo", + "firebaseio-staging.com" + ); + subject = new database.DataSnapshot(data, path, getApp(), instance); + }; + + describe("#ref: firebase.database.Reference", () => { + it("should return a ref for correct instance, not the default instance", () => { + populate({}); + expect(subject.ref.toJSON()).to.equal("https://other-subdomain.firebaseio-staging.com/foo"); + }); + }); + + describe("#val(): any", () => { + it("should return child values based on the child path", () => { + populate(applyChange({ a: { b: "c" } }, { a: { d: "e" } })); + expect(subject.child("a").val()).to.deep.equal({ b: "c", d: "e" }); + }); + + it("should return null for children past a leaf", () => { + populate(applyChange({ a: 23 }, { b: 33 })); + expect(subject.child("a/b").val()).to.be.null; + expect(subject.child("b/c").val()).to.be.null; + expect(subject.child("a/b/c").val()).to.be.null; + }); + + it("should return a leaf value", () => { + populate(23); + expect(subject.val()).to.eq(23); + populate({ b: 23, a: null }); + expect(subject.child("b").val()).to.eq(23); + }); + + it("should coerce object into array if all keys are integers", () => { + populate({ 0: "a", 1: "b", 2: { c: "d" } }); + expect(subject.val()).to.deep.equal(["a", "b", { c: "d" }]); + populate({ 0: "a", 2: "b", 3: { c: "d" } }); + expect(subject.val()).to.deep.equal(["a", undefined, "b", { c: "d" }]); + populate({ 
foo: { 0: "a", 1: "b" } }); + expect(subject.val()).to.deep.equal({ foo: ["a", "b"] }); + }); + + // Regression test: zero-values (including children) were accidentally forwarded as 'null'. + it("should deal with zero-values appropriately", () => { + populate(0); + expect(subject.val()).to.equal(0); + populate({ myKey: 0 }); + expect(subject.val()).to.deep.equal({ myKey: 0 }); + }); + + // Regression test: .val() was returning array of nulls when there's a property called length (BUG#37683995) + it('should return correct values when data has "length" property', () => { + populate({ length: 3, foo: "bar" }); + expect(subject.val()).to.deep.equal({ length: 3, foo: "bar" }); + }); + + it("should deal with null-values appropriately", () => { + populate(null); + expect(subject.val()).to.be.null; + expect(subject.child("a").val()).to.be.null; + expect(subject.child("a/b").val()).to.be.null; + + populate({ myKey: null }); + expect(subject.val()).to.be.null; + expect(subject.child("myKey").val()).to.be.null; + expect(subject.child("myKey/a").val()).to.be.null; + expect(subject.child("myKey/a/b").val()).to.be.null; + expect(subject.child("a").val()).to.be.null; + expect(subject.child("a/b").val()).to.be.null; + }); + + it("should deal with empty object values appropriately", () => { + populate({}); + expect(subject.val()).to.be.null; + expect(subject.child("a").val()).to.be.null; + + populate({ myKey: {} }); + expect(subject.val()).to.be.null; + expect(subject.child("myKey").val()).to.be.null; + + populate({ myKey: { child: null } }); + expect(subject.val()).to.be.null; + expect(subject.child("myKey").val()).to.be.null; + expect(subject.child("myKey/child").val()).to.be.null; + }); + + it("should deal with empty array values appropriately", () => { + populate([]); + expect(subject.val()).to.be.null; + + populate({ myKey: [] }); + expect(subject.val()).to.be.null; + + populate({ myKey: [null] }); + expect(subject.val()).to.be.null; + + populate({ myKey: [{}] }); + expect(subject.val()).to.be.null; + + populate({ myKey: [{ myKey: null }] }); + expect(subject.val()).to.be.null; + + populate({ myKey: [{ myKey: {} }] }); + expect(subject.val()).to.be.null; + }); + }); + + describe("#child(): DataSnapshot", () => { + it("should work with multiple calls", () => { + populate({ a: { b: { c: "d" } } }); + expect(subject.child("a").child("b/c").val()).to.equal("d"); + }); + }); + + describe("#exists(): boolean", () => { + it("should be true for an object value", () => { + populate({ a: { b: "c" } }); + expect(subject.child("a").exists()).to.be.true; + }); + + it("should be true for a leaf value", () => { + populate({ a: { b: "c" } }); + expect(subject.child("a/b").exists()).to.be.true; + }); + + it("should be false for a non-existent value", () => { + populate({ a: { b: "c", nullChild: null } }); + expect(subject.child("d").exists()).to.be.false; + expect(subject.child("nullChild").exists()).to.be.false; + }); + + it("should be false for a value pathed beyond a leaf", () => { + populate({ a: { b: "c" } }); + expect(subject.child("a/b/c").exists()).to.be.false; + }); + + it("should be false for an empty object value", () => { + populate({ a: {} }); + expect(subject.child("a").exists()).to.be.false; + + populate({ a: { child: null } }); + expect(subject.child("a").exists()).to.be.false; + + populate({ a: { child: {} } }); + expect(subject.child("a").exists()).to.be.false; + }); + + it("should be false for an empty array value", () => { + populate({ a: [] }); + expect(subject.child("a").exists()).to.be.false; + 
+ populate({ a: [null] }); + expect(subject.child("a").exists()).to.be.false; + + populate({ a: [{}] }); + expect(subject.child("a").exists()).to.be.false; + }); + + it("should be true for a falsy value (other than null)", () => { + populate({ num: 0, bool: false, n: null }); + expect(subject.exists()).to.be.true; + expect(subject.child("num").exists()).to.be.true; + expect(subject.child("bool").exists()).to.be.true; + expect(subject.child("n").exists()).to.be.false; + expect(subject.child("missing").exists()).to.be.false; + }); + }); + + describe("#forEach(action: (a: DataSnapshot) => boolean): boolean", () => { + it("should iterate through child snapshots", () => { + populate({ a: "b", c: "d" }); + let out = ""; + subject.forEach((snap: any) => { + out += snap.val(); + }); + expect(out).to.equal("bd"); + }); + + it("should have correct key values for child snapshots", () => { + populate({ a: "b", c: "d" }); + let out = ""; + subject.forEach((snap: any) => { + out += snap.key; + }); + expect(out).to.equal("ac"); + }); + + it("should not execute for leaf or null nodes", () => { + populate(23); + let count = 0; + const counter = () => count++; + + expect(subject.forEach(counter)).to.equal(false); + expect(count).to.eq(0); + + populate({ + a: "foo", + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + count = 0; + + expect(subject.forEach(counter)).to.equal(false); + expect(count).to.eq(1); + }); + + it("should cancel further enumeration if callback returns true", () => { + populate({ a: "b", c: "d", e: "f", g: "h" }); + let out = ""; + const ret = subject.forEach((snap: any) => { + if (snap.val() === "f") { + return true; + } + out += snap.val(); + }); + expect(out).to.equal("bd"); + expect(ret).to.equal(true); + }); + + it("should not cancel further enumeration if callback returns a truthy value", () => { + populate({ a: "b", c: "d", e: "f", g: "h" }); + let out = ""; + const ret = subject.forEach((snap: any) => { + out += snap.val(); + return 1; + }); + expect(out).to.equal("bdfh"); + expect(ret).to.equal(false); + }); + + it("should not cancel further enumeration if callback does not return", () => { + populate({ a: "b", c: "d", e: "f", g: "h" }); + let out = ""; + const ret = subject.forEach((snap: any) => { + out += snap.val(); + }); + expect(out).to.equal("bdfh"); + expect(ret).to.equal(false); + }); + }); + + describe("#numChildren()", () => { + it("should be key count for objects", () => { + populate({ + a: "b", + c: "d", + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(subject.numChildren()).to.eq(2); + }); + + it("should be 0 for non-objects", () => { + populate(23); + expect(subject.numChildren()).to.eq(0); + + populate({ + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(subject.numChildren()).to.eq(0); + }); + }); + + describe("#hasChildren()", () => { + it("should true for objects", () => { + populate({ + a: "b", + c: "d", + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(subject.hasChildren()).to.be.true; + }); + + it("should be false for non-objects", () => { + populate(23); + expect(subject.hasChildren()).to.be.false; + + populate({ + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(subject.hasChildren()).to.be.false; + }); + }); + + describe("#hasChild(childPath): boolean", () => { + it("should return true for a child or deep child", () => { + populate({ a: { b: "c" }, d: 23 }); + expect(subject.hasChild("a/b")).to.be.true; + 
expect(subject.hasChild("d")).to.be.true; + }); + + it("should return false if a child is missing", () => { + populate({ + a: "b", + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(subject.hasChild("c")).to.be.false; + expect(subject.hasChild("a/b")).to.be.false; + expect(subject.hasChild("nullChild")).to.be.false; + expect(subject.hasChild("emptyObjectChild")).to.be.false; + expect(subject.hasChild("emptyArrayChild")).to.be.false; + expect(subject.hasChild("c")).to.be.false; + expect(subject.hasChild("a/b")).to.be.false; + }); + }); + + describe("#key: string", () => { + it("should return the key name", () => { + expect(subject.key).to.equal("foo"); + }); + + it("should return null for the root", () => { + const [instance, path] = database.extractInstanceAndPath( + "projects/_/instances/foo/refs/", + undefined + ); + const snapshot = new database.DataSnapshot(null, path, getApp(), instance); + expect(snapshot.key).to.be.null; + }); + + it("should work for child paths", () => { + expect(subject.child("foo/bar").key).to.equal("bar"); + }); + }); + + describe("#toJSON(): Object", () => { + it("should return the current value", () => { + populate({ + a: "b", + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(subject.toJSON()).to.deep.equal(subject.val()); + }); + + it("should be stringifyable", () => { + populate({ + a: "b", + nullChild: null, + emptyObjectChild: {}, + emptyArrayChild: [], + }); + expect(JSON.stringify(subject)).to.deep.equal('{"a":"b"}'); + }); + }); +}); diff --git a/spec/v1/providers/firestore.spec.ts b/spec/v1/providers/firestore.spec.ts new file mode 100644 index 000000000..f8f4288db --- /dev/null +++ b/spec/v1/providers/firestore.spec.ts @@ -0,0 +1,562 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; +import { Timestamp } from "firebase-admin/firestore"; + +import * as functions from "../../../src/v1"; +import * as firestore from "../../../src/v1/providers/firestore"; +import { expectType } from "../../common/metaprogramming"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("Firestore Functions", () => { + function constructValue(fields: any) { + return { + fields, + name: "projects/pid/databases/(default)/documents/collection/123", + createTime: "2017-06-02T18:48:58.920638Z", + updateTime: "2017-07-02T18:48:58.920638Z", + }; + } + + function makeEvent(data: any, context?: { [key: string]: any }) { + context = context || {}; + return { + data, + context: { + eventId: "123", + timestamp: "2018-07-03T00:49:04.264Z", + eventType: "google.firestore.document.create", + resource: { + name: "projects/myproj/databases/(default)/documents/tests/test1", + service: "service", + }, + ...context, + }, + }; + } + + function constructEvent(oldValue: object, value: object) { + return { + data: { + oldValue, + value, + }, + context: { + resource: { + name: "resource", + }, + }, + }; + } + + function createOldValue() { + return constructValue({ + key1: { + booleanValue: false, + }, + key2: { + integerValue: "111", + }, + }); + } + + function createValue() { + return constructValue({ + key1: { + booleanValue: true, + }, + key2: { + integerValue: "123", + }, + }); + } + + describe("document builders and event types", () => { + function expectedTrigger(resource: string, eventType: string) { + return { + eventTrigger: { + resource, + eventType: `providers/cloud.firestore/eventTypes/${eventType}`, + service: "firestore.googleapis.com", + }, + }; + } + + function expectedEndpoint(resource: string, eventType: string) { + return { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventFilters: { + resource, + }, + eventType: `providers/cloud.firestore/eventTypes/${eventType}`, + retry: false, + }, + labels: {}, + }; + } + + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should allow terse constructors", () => { + const resource = "projects/project1/databases/(default)/documents/users/{uid}"; + const cloudFunction = firestore.document("users/{uid}").onWrite((snap, context) => { + expectType<{ uid: string }>(context.params); + }); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(resource, "document.write")); + }); + + it("should allow custom namespaces", () => { + const resource = "projects/project1/databases/(default)/documents@v2/users/{uid}"; + const cloudFunction = firestore + .namespace("v2") + .document("users/{uid}") + .onWrite(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, "document.write")); + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(resource, "document.write")); + }); + + it("should allow custom namespaces", () => { + const resource = "projects/project1/databases/(default)/documents@v2/users/{uid}"; + const cloudFunction = firestore + .namespace("v2") + .document("users/{uid}") + .onWrite(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, "document.write")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(resource, "document.write")); + }); + + it("should allow custom databases", () => { + const resource = "projects/project1/databases/myDB/documents/users/{uid}"; + const cloudFunction = firestore + 
.database("myDB") + .document("users/{uid}") + .onWrite(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, "document.write")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(resource, "document.write")); + }); + + it("should allow both custom database and namespace", () => { + const resource = "projects/project1/databases/myDB/documents@v2/users/{uid}"; + const cloudFunction = firestore + .database("myDB") + .namespace("v2") + .document("users/{uid}") + .onWrite((snap, context) => { + expectType<{ uid: string }>(context.params); + }); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(resource, "document.write")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(resource, "document.write")); + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .firestore.document("doc") + .onCreate((snap, context) => { + expectType<Record<string, never>>(context.params); + }); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + }); + + describe("process.env.GCLOUD_PROJECT not set", () => { + it("should not throw if __trigger is not accessed", () => { + expect(() => firestore.document("input").onCreate(() => null)).to.not.throw(Error); + }); + + it("should throw when trigger is accessed", () => { + expect(() => firestore.document("input").onCreate(() => null).__trigger).to.throw(Error); + }); + + it("should throw when endpoint is accessed", () => { + expect(() => firestore.document("input").onCreate(() => null).__endpoint).to.throw(Error); + }); + + it("should not throw when #run is called", () => { + const cf = firestore.document("input").onCreate(() => null); + expect(cf.run).to.not.throw(Error); + }); + }); + + describe("dataConstructor", () => { + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it('constructs appropriate fields and getters for event.data on "document.write" events', () => { + const testFunction = firestore.document("path").onWrite((change) => { + expect(change.before.data()).to.deep.equal({ + key1: false, + key2: 111, + }); + expect(change.before.get("key1")).to.equal(false); + expect(change.after.data()).to.deep.equal({ key1: true, key2: 123 }); + expect(change.after.get("key1")).to.equal(true); + return true; // otherwise will get warning about returning undefined + }); + const event = constructEvent(createOldValue(), createValue()); + return testFunction(event.data, event.context); + }).timeout(5000); + + it('constructs appropriate fields and getters for event.data on "document.create" events', () => { + const testFunction = firestore.document("path").onCreate((data) => { + expect(data.data()).to.deep.equal({ key1: true, key2: 123 }); + expect(data.get("key1")).to.equal(true); + return true; // otherwise will get warning about returning undefined + }); + const event = constructEvent({}, createValue()); + return testFunction(event.data, event.context); + }).timeout(5000); + + it('constructs appropriate fields and getters for event.data on "document.update" events', () => { + const testFunction =
firestore.document("path").onUpdate((change) => { + expect(change.before.data()).to.deep.equal({ + key1: false, + key2: 111, + }); + expect(change.before.get("key1")).to.equal(false); + expect(change.after.data()).to.deep.equal({ key1: true, key2: 123 }); + expect(change.after.get("key1")).to.equal(true); + return true; // otherwise will get warning about returning undefined + }); + const event = constructEvent(createOldValue(), createValue()); + return testFunction(event.data, event.context); + }).timeout(5000); + + it('constructs appropriate fields and getters for event.data on "document.delete" events', () => { + const testFunction = firestore.document("path").onDelete((data) => { + expect(data.data()).to.deep.equal({ key1: false, key2: 111 }); + expect(data.get("key1")).to.equal(false); + return true; // otherwise will get warning about returning undefined + }); + const event = constructEvent(createOldValue(), {}); + return testFunction(event.data, event.context); + }).timeout(5000); + }); + + describe("SnapshotConstructor", () => { + describe("#data()", () => { + it("should parse int values", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: constructValue({ key: { integerValue: "123" } }), + }) + ); + expect(snapshot.data()).to.deep.equal({ key: 123 }); + }); + + it("should parse double values", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: constructValue({ key: { doubleValue: 12.34 } }), + }) + ); + expect(snapshot.data()).to.deep.equal({ key: 12.34 }); + }); + + it("should parse null values", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: constructValue({ key: { nullValue: null } }), + }) + ); + expect(snapshot.data()).to.deep.equal({ key: null }); + }); + + it("should parse boolean values", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: constructValue({ key: { booleanValue: true } }), + }) + ); + expect(snapshot.data()).to.deep.equal({ key: true }); + }); + + it("should parse string values", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: constructValue({ key: { stringValue: "foo" } }), + }) + ); + expect(snapshot.data()).to.deep.equal({ key: "foo" }); + }); + + it("should parse array values", () => { + const raw = constructValue({ + key: { + arrayValue: { + values: [{ integerValue: "1" }, { integerValue: "2" }], + }, + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()).to.deep.equal({ key: [1, 2] }); + }); + + it("should parse object values", () => { + const raw = constructValue({ + keyParent: { + mapValue: { + fields: { + key1: { + stringValue: "val1", + }, + key2: { + stringValue: "val2", + }, + }, + }, + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()).to.deep.equal({ + keyParent: { key1: "val1", key2: "val2" }, + }); + }); + + it("should parse GeoPoint values", () => { + const raw = constructValue({ + geoPointValue: { + mapValue: { + fields: { + latitude: { + doubleValue: 40.73, + }, + longitude: { + doubleValue: -73.93, + }, + }, + }, + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()).to.deep.equal({ + geoPointValue: { + latitude: 40.73, + longitude: -73.93, + }, + }); + }); + + it("should parse reference values", () => { + const raw = constructValue({ + referenceVal: { + referenceValue: 
"projects/proj1/databases/(default)/documents/doc1/id", + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()?.referenceVal?.path).to.equal("doc1/id"); + }); + + it("should parse timestamp values with precision to the millisecond", () => { + const raw = constructValue({ + timestampVal: { + timestampValue: "2017-06-13T00:58:40.349Z", + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()).to.deep.equal({ + timestampVal: Timestamp.fromDate(new Date("2017-06-13T00:58:40.349Z")), + }); + }); + + it("should parse timestamp values with precision to the second", () => { + const raw = constructValue({ + timestampVal: { + timestampValue: "2017-06-13T00:58:40Z", + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()).to.deep.equal({ + timestampVal: Timestamp.fromDate(new Date("2017-06-13T00:58:40Z")), + }); + }); + + it("should parse binary values", () => { + // Format defined in https://developers.google.com/discovery/v1/type-format + const raw = constructValue({ + binaryVal: { + bytesValue: "Zm9vYmFy", + }, + }); + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: raw, + }) + ); + expect(snapshot.data()).to.deep.equal({ + binaryVal: Buffer.from("foobar"), + }); + }); + }); + + describe("Other DocumentSnapshot methods", () => { + let snapshot: FirebaseFirestore.DocumentSnapshot; + let newSnapshot: FirebaseFirestore.DocumentSnapshot; + + before(() => { + snapshot = firestore.snapshotConstructor( + makeEvent({ + value: { + fields: { key: { integerValue: "1" } }, + createTime: "2017-06-17T14:45:17.876479Z", + updateTime: "2017-08-31T18:05:26.928527Z", + readTime: "2017-07-31T18:23:26.928527Z", + name: "projects/pid/databases/(default)/documents/collection/123", + }, + }) + ); + newSnapshot = firestore.snapshotConstructor( + makeEvent({ + value: { + fields: { key: { integerValue: "2" } }, + createTime: "2017-06-17T14:45:17.876479Z", + updateTime: "2017-06-17T14:45:17.876479Z", + name: "projects/pid/databases/(default)/documents/collection/124", + }, + }) + ); + }); + + it("should support #exists", () => { + expect(snapshot.exists).to.be.true; + }); + + it("should support #ref", () => { + expect(snapshot.ref.path).to.equal("collection/123"); + }); + + it("should support #id", () => { + expect(snapshot.id).to.equal("123"); + }); + + it("should support #createTime", () => { + expect(snapshot.createTime.seconds).to.be.a("number"); + expect(snapshot.createTime.nanoseconds).to.be.a("number"); + }); + + it("should support #updateTime", () => { + expect(snapshot.updateTime.seconds).to.be.a("number"); + expect(snapshot.updateTime.nanoseconds).to.be.a("number"); + }); + + it("should support #readTime", () => { + expect(snapshot.readTime.seconds).to.be.a("number"); + expect(snapshot.readTime.nanoseconds).to.be.a("number"); + expect(newSnapshot.readTime.seconds).to.be.a("number"); + expect(newSnapshot.readTime.nanoseconds).to.be.a("number"); + }); + }); + + describe("Handle empty and non-existent documents", () => { + it("constructs non-existent DocumentSnapshot when whole document deleted", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent( + { + value: {}, // value is empty when the whole document is deleted + }, + { + resource: { + name: "projects/pid/databases/(default)/documents/collection/123", + }, + } + ) + ); + expect(snapshot.exists).to.be.false; + 
expect(snapshot.ref.path).to.equal("collection/123"); + }); + + it("constructs existent DocumentSnapshot with empty data when all fields of document deleted", () => { + const snapshot = firestore.snapshotConstructor( + makeEvent({ + value: { + // value is not empty when document still exists + createTime: "2017-06-02T18:48:58.920638Z", + updateTime: "2017-07-02T18:48:58.920638Z", + name: "projects/pid/databases/(default)/documents/collection/123", + }, + }) + ); + expect(snapshot.exists).to.be.true; + expect(snapshot.ref.path).to.equal("collection/123"); + expect(snapshot.data()).to.deep.equal({}); + expect(snapshot.get("key1")).to.equal(undefined); + }); + }); + }); +}); diff --git a/spec/v1/providers/fixtures.ts b/spec/v1/providers/fixtures.ts new file mode 100644 index 000000000..e047ba07f --- /dev/null +++ b/spec/v1/providers/fixtures.ts @@ -0,0 +1,50 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+import { ManifestEndpoint } from "../../../src/runtime/manifest"; +import * as functions from "../../../src/v1"; +import * as options from "../../../src/v2/options"; + +export const MINIMIAL_TASK_QUEUE_TRIGGER: ManifestEndpoint["taskQueueTrigger"] = { + rateLimits: { + maxConcurrentDispatches: functions.RESET_VALUE, + maxDispatchesPerSecond: functions.RESET_VALUE, + }, + retryConfig: { + maxAttempts: functions.RESET_VALUE, + maxBackoffSeconds: functions.RESET_VALUE, + maxDoublings: functions.RESET_VALUE, + maxRetrySeconds: functions.RESET_VALUE, + minBackoffSeconds: functions.RESET_VALUE, + }, +}; + +export const MINIMAL_SCHEDULE_TRIGGER: ManifestEndpoint["scheduleTrigger"] = { + schedule: "", + timeZone: options.RESET_VALUE, + retryConfig: { + retryCount: options.RESET_VALUE, + maxRetryDuration: options.RESET_VALUE, + maxBackoffDuration: options.RESET_VALUE, + minBackoffDuration: options.RESET_VALUE, + maxDoublings: options.RESET_VALUE, + }, +}; diff --git a/spec/v1/providers/https.spec.ts b/spec/v1/providers/https.spec.ts new file mode 100644 index 000000000..08dd53d7d --- /dev/null +++ b/spec/v1/providers/https.spec.ts @@ -0,0 +1,284 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; + +import * as functions from "../../../src/v1"; +import * as https from "../../../src/v1/providers/https"; +import * as debug from "../../../src/common/debug"; +import * as sinon from "sinon"; +import { + expectedResponseHeaders, + generateUnsignedIdToken, + MockRequest, + mockRequest, +} from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; +import { CALLABLE_AUTH_HEADER, ORIGINAL_AUTH_HEADER } from "../../../src/common/providers/https"; +import { onInit } from "../../../src/v1"; + +describe("CloudHttpsBuilder", () => { + describe("#onRequest", () => { + it("should return a trigger with appropriate values", () => { + const result = https.onRequest((req, resp) => { + resp.send(200); + }); + expect(result.__trigger).to.deep.equal({ httpsTrigger: {} }); + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + httpsTrigger: {}, + }); + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + invoker: "private", + }) + .https.onRequest(() => null); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + expect(fn.__trigger.httpsTrigger.invoker).to.deep.equal(["private"]); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + expect(fn.__endpoint.httpsTrigger.invoker).to.deep.equal(["private"]); + }); + + it("calls init function", async () => { + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + const fn = functions.https.onRequest((_req, res) => { + res.send(200); + }); + const req = new MockRequest( + { + data: { foo: "bar" }, + }, + { + "content-type": "application/json", + } + ); + req.method = "POST"; + await runHandler(fn, req as any); + expect(hello).to.equal("world"); + }); + }); +}); + +describe("#onCall", () => { + afterEach(() => { + sinon.verifyAndRestore(); + }); + + it("should return a trigger/endpoint with appropriate values", () => { + const result = https.onCall(() => { + return "response"; + }); + + expect(result.__trigger).to.deep.equal({ + httpsTrigger: {}, + labels: { "deployment-callable": "true" }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + callableTrigger: {}, + labels: {}, + }); + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .https.onCall(() => null); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("has a .run method", async () => { + const cf = https.onCall((d, c) => { + return { data: d, context: c }; + }); + + const data = "data"; + const context = { + instanceIdToken: "token", + auth: { + uid: "abc", + token: "token", + }, + }; + + await expect(cf.run(data, context)).to.eventually.deep.equal({ data, context }); + 
}); + + // Regression test for firebase-functions#947 + it("should lock to the v1 API even with function.length == 1", async () => { + let gotData: Record<string, any>; + const func = https.onCall((data) => { + gotData = data; + }); + + const req = new MockRequest( + { + data: { foo: "bar" }, + }, + { + "content-type": "application/json", + } + ); + req.method = "POST"; + + const response = await runHandler(func, req as any); + expect(response.status).to.equal(200); + expect(gotData).to.deep.equal({ foo: "bar" }); + }); + + it("should call initializer", async () => { + const func = https.onCall(() => null); + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + } + ); + req.method = "POST"; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req as any); + expect(hello).to.equal("world"); + }); + + // Test for firebase-tools#5210 + it("should create context.auth for v1 emulated functions", async () => { + sinon.stub(debug, "isDebugFeatureEnabled").withArgs("skipTokenVerification").returns(true); + + let gotData: Record<string, any>; + let gotContext: Record<string, any>; + const rawToken = generateUnsignedIdToken("123456"); + const reqData = { hello: "world" }; + const authContext = { + uid: "SomeUID", + token: { + aud: "123456", + sub: "SomeUID", + uid: "SomeUID", + }, + rawToken, + }; + const originalAuth = "Bearer " + rawToken; + const func = https.onCall((data, context) => { + gotData = data; + gotContext = context; + }); + const mockReq = mockRequest( + reqData, + "application/json", + {}, + { + [CALLABLE_AUTH_HEADER]: encodeURIComponent(JSON.stringify(authContext)), + [ORIGINAL_AUTH_HEADER]: originalAuth, + } + ); + + const response = await runHandler(func, mockReq as any); + + expect(response.status).to.equal(200); + expect(gotData).to.deep.eq(reqData); + expect(gotContext.rawRequest).to.deep.eq(mockReq); + expect(gotContext.rawRequest.headers["authorization"]).to.eq(originalAuth); + expect(gotContext.auth).to.deep.eq(authContext); + }); +}); + +describe("callable CORS", () => { + it("handles OPTIONS preflight", async () => { + const func = https.onCall(() => { + throw new Error(`This shouldn't have gotten called for an OPTIONS preflight.`); + }); + + const req = new MockRequest( + {}, + { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + Origin: "example.com", + } + ); + req.method = "OPTIONS"; + + const response = await runHandler(func, req as any); + + expect(response.status).to.equal(204); + expect(response.body).to.be.undefined; + expect(response.headers).to.deep.equal({ + "Access-Control-Allow-Methods": "POST", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + }); + + it("adds CORS headers", async () => { + const func = https.onCall(() => 42); + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + origin: "example.com", + } + ); + req.method = "POST"; + + const response = await runHandler(func, req as any); + + expect(response.status).to.equal(200); + expect(response.body).to.be.deep.equal(JSON.stringify({ result: 42 })); + expect(response.headers).to.deep.equal(expectedResponseHeaders); + }); +}); diff --git a/spec/v1/providers/httpsAsync.spec.ts b/spec/v1/providers/httpsAsync.spec.ts new file mode 100644 index 000000000..84fcf3a59 --- /dev/null +++ b/spec/v1/providers/httpsAsync.spec.ts @@ -0,0 +1,49 @@ +import { expect } from "chai"; +import * as sinon from "sinon"; +import * as https from
"../../../src/v1/providers/https"; +import * as logger from "../../../src/logger"; +import { MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; + +describe("CloudHttpsBuilder async onRequest", () => { + let loggerSpy: sinon.SinonSpy; + + beforeEach(() => { + loggerSpy = sinon.spy(logger, "error"); + }); + + afterEach(() => { + loggerSpy.restore(); + }); + + it("should catch and log unhandled rejections in async onRequest handlers", async () => { + const err = new Error("boom"); + const fn = https.onRequest(async (_req, _res) => { + await Promise.resolve(); + throw err; + }); + + const req = new MockRequest({}, {}); + req.method = "GET"; + + const result = await runHandler(fn, req as any); + + expect(loggerSpy.calledWith("Unhandled error", err)).to.be.true; + expect(result.status).to.equal(500); + expect(result.body).to.equal("Internal Server Error"); + }); + + it("should not log if handler completes successfully", async () => { + const fn = https.onRequest(async (_req, res) => { + await Promise.resolve(); + res.send(200); + }); + + const req = new MockRequest({}, {}); + req.method = "GET"; + + await runHandler(fn, req as any); + + expect(loggerSpy.called).to.be.false; + }); +}); diff --git a/spec/v1/providers/pubsub.spec.ts b/spec/v1/providers/pubsub.spec.ts new file mode 100644 index 000000000..0a7b89ad6 --- /dev/null +++ b/spec/v1/providers/pubsub.spec.ts @@ -0,0 +1,438 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; + +import { Event, RESET_VALUE } from "../../../src/v1"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; +import { MINIMAL_SCHEDULE_TRIGGER } from "./fixtures"; +import * as functions from "../../../src/v1"; +import * as pubsub from "../../../src/v1/providers/pubsub"; + +describe("Pubsub Functions", () => { + describe("pubsub.Message", () => { + describe("#json", () => { + it("should return json decoded from base64", () => { + const message = new pubsub.Message({ + data: new Buffer('{"hello":"world"}', "utf8").toString("base64"), + }); + + expect(message.json.hello).to.equal("world"); + }); + + it("should preserve passed in json", () => { + const message = new pubsub.Message({ + data: new Buffer('{"hello":"world"}', "utf8").toString("base64"), + json: { goodbye: "world" }, + }); + + expect(message.json.goodbye).to.equal("world"); + }); + }); + + describe("#toJSON", () => { + it("should be JSON stringify-able", () => { + const encoded = new Buffer('{"hello":"world"}', "utf8").toString("base64"); + const message = new pubsub.Message({ + data: encoded, + }); + + expect(JSON.parse(JSON.stringify(message))).to.deep.equal({ + data: encoded, + attributes: {}, + }); + }); + }); + }); + + describe("pubsub.FunctionBuilder", () => { + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .pubsub.topic("toppy") + .onPublish(() => null); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + describe("#onPublish", () => { + it("should return a trigger/endpoint with appropriate values", () => { + // Pick up project from process.env.GCLOUD_PROJECT + const result = pubsub.topic("toppy").onPublish(() => null); + + expect(result.__trigger).to.deep.equal({ + eventTrigger: { + eventType: "google.pubsub.topic.publish", + resource: "projects/project1/topics/toppy", + service: "pubsub.googleapis.com", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventType: "google.pubsub.topic.publish", + eventFilters: { + resource: "projects/project1/topics/toppy", + }, + retry: false, + }, + labels: {}, + }); + }); + + it("should throw with improperly formatted topics", () => { + expect(() => pubsub.topic("bad/topic/format")).to.throw(Error); + }); + + it("should properly handle a new-style event", () => { + const raw = new Buffer('{"hello":"world"}', "utf8").toString("base64"); + const event: Event = { + data: { + data: raw, + attributes: { + foo: "bar", + }, + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "google.pubsub.topic.publish", + resource: { + service: "pubsub.googleapis.com", + name: "projects/project1/topics/toppy", + }, + }, + }; + + const result = pubsub.topic("toppy").onPublish((data) => { + return { + raw: data.data, + json: data.json, + attributes: data.attributes, + }; + }); + + return expect(result(event.data, event.context)).to.eventually.deep.equal({ + raw, + json: { hello: "world" }, 
+ attributes: { foo: "bar" }, + }); + }); + }); + + describe("#schedule", () => { + it("should return a trigger/endpoint with schedule", () => { + const result = pubsub.schedule("every 5 minutes").onRun(() => null); + + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + }); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + }); + }); + + it("should return a trigger/endpoint with schedule and timeZone when one is chosen", () => { + const result = pubsub + .schedule("every 5 minutes") + .timeZone("America/New_York") + .onRun(() => null); + + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + timeZone: "America/New_York", + }); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + timeZone: "America/New_York", + }); + }); + + it("should return a trigger/endpoint with schedule and retry config when called with retryConfig", () => { + const retryConfig = { + retryCount: 3, + maxRetryDuration: "10 minutes", + minBackoffDuration: "10 minutes", + maxBackoffDuration: "10 minutes", + maxDoublings: 5, + }; + const result = pubsub + .schedule("every 5 minutes") + .retryConfig(retryConfig) + .onRun(() => null); + + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + retryConfig, + }); + expect(result.__trigger.labels).to.deep.equal({ + "deployment-scheduled": "true", + }); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + retryConfig, + }); + expect(result.__endpoint.labels).to.be.empty; + }); + + it( + "should return a trigger/endpoint with schedule, timeZone and retry config" + + "when called with retryConfig and timeout", + () => { + const retryConfig = { + retryCount: 3, + maxRetryDuration: "10 minutes", + minBackoffDuration: "10 minutes", + maxBackoffDuration: "10 minutes", + maxDoublings: 5, + }; + const result = pubsub + .schedule("every 5 minutes") + .timeZone("America/New_York") + .retryConfig(retryConfig) + .onRun(() => null); + + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + retryConfig, + timeZone: "America/New_York", + }); + expect(result.__trigger.labels).to.deep.equal({ + "deployment-scheduled": "true", + }); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + retryConfig, + timeZone: "America/New_York", + }); + expect(result.__endpoint.labels).to.be.empty; + } + ); + + it("should return an appropriate trigger/endpoint when called with region and options", () => { + const result = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .pubsub.schedule("every 5 minutes") + .onRun(() => null); + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + }); + expect(result.__trigger.regions).to.deep.equal(["us-east1"]); + expect(result.__trigger.availableMemoryMb).to.deep.equal(256); + expect(result.__trigger.timeout).to.deep.equal("90s"); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + }); + expect(result.__endpoint.region).to.deep.equal(["us-east1"]); + expect(result.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(result.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("should return an appropriate 
trigger/endpoint when called with region, timeZone, and options", () => { + const result = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .pubsub.schedule("every 5 minutes") + .timeZone("America/New_York") + .onRun(() => null); + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + timeZone: "America/New_York", + }); + expect(result.__trigger.regions).to.deep.equal(["us-east1"]); + expect(result.__trigger.availableMemoryMb).to.deep.equal(256); + expect(result.__trigger.timeout).to.deep.equal("90s"); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + timeZone: "America/New_York", + }); + expect(result.__endpoint.region).to.deep.equal(["us-east1"]); + expect(result.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(result.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("should return an appropriate trigger/endpoint when called with region, options and retryConfig", () => { + const retryConfig = { + retryCount: 3, + maxRetryDuration: "10 minutes", + minBackoffDuration: "10 minutes", + maxBackoffDuration: "10 minutes", + maxDoublings: 5, + }; + const result = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .pubsub.schedule("every 5 minutes") + .retryConfig(retryConfig) + .onRun(() => null); + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + retryConfig, + }); + expect(result.__trigger.labels).to.deep.equal({ + "deployment-scheduled": "true", + }); + expect(result.__trigger.regions).to.deep.equal(["us-east1"]); + expect(result.__trigger.availableMemoryMb).to.deep.equal(256); + expect(result.__trigger.timeout).to.deep.equal("90s"); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + timeZone: RESET_VALUE, + retryConfig, + }); + expect(result.__endpoint.region).to.deep.equal(["us-east1"]); + expect(result.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(result.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("should return an appropriate trigger/endpoint when called with region, options, retryConfig, and timeZone", () => { + const retryConfig = { + retryCount: 3, + maxRetryDuration: "10 minutes", + minBackoffDuration: "10 minutes", + maxBackoffDuration: "10 minutes", + maxDoublings: 5, + }; + const result = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .pubsub.schedule("every 5 minutes") + .timeZone("America/New_York") + .retryConfig(retryConfig) + .onRun(() => null); + expect(result.__trigger.schedule).to.deep.equal({ + schedule: "every 5 minutes", + timeZone: "America/New_York", + retryConfig, + }); + expect(result.__trigger.labels).to.deep.equal({ + "deployment-scheduled": "true", + }); + expect(result.__trigger.regions).to.deep.equal(["us-east1"]); + expect(result.__trigger.availableMemoryMb).to.deep.equal(256); + expect(result.__trigger.timeout).to.deep.equal("90s"); + + expect(result.__endpoint.scheduleTrigger).to.deep.equal({ + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "every 5 minutes", + timeZone: "America/New_York", + retryConfig, + }); + expect(result.__endpoint.region).to.deep.equal(["us-east1"]); + expect(result.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(result.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + it("should return an appropriate endpoint when called with 
preserveExternalChanges", () => { + const result = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + preserveExternalChanges: true, + }) + .pubsub.schedule("every 5 minutes") + .timeZone("America/New_York") + .onRun(() => null); + + expect(result.__endpoint).to.deep.eq({ + platform: "gcfv1", + labels: {}, + region: ["us-east1"], + availableMemoryMb: 256, + timeoutSeconds: 90, + scheduleTrigger: { + schedule: "every 5 minutes", + timeZone: "America/New_York", + retryConfig: {}, + }, + }); + }); + }); + }); + + describe("process.env.GCLOUD_PROJECT not set", () => { + it("should not throw if __trigger is not accessed", () => { + expect(() => pubsub.topic("toppy").onPublish(() => null)).to.not.throw(Error); + }); + + it("should throw when trigger is accessed", () => { + expect(() => pubsub.topic("toppy").onPublish(() => null).__trigger).to.throw(Error); + }); + + it("should throw when endpoint is accessed", () => { + expect(() => pubsub.topic("toppy").onPublish(() => null).__endpoint).to.throw(Error); + }); + + it("should not throw when #run is called", () => { + const cf = pubsub.topic("toppy").onPublish(() => null); + expect(cf.run).to.not.throw(Error); + }); + }); +}); diff --git a/spec/v1/providers/remoteConfig.spec.ts b/spec/v1/providers/remoteConfig.spec.ts new file mode 100644 index 000000000..e207b5de3 --- /dev/null +++ b/spec/v1/providers/remoteConfig.spec.ts @@ -0,0 +1,135 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+import { expect } from "chai"; + +import * as functions from "../../../src/v1"; +import { CloudFunction, Event } from "../../../src/v1/cloud-functions"; +import * as remoteConfig from "../../../src/v1/providers/remoteConfig"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("RemoteConfig Functions", () => { + function constructVersion() { + return { + versionNumber: 1, + updateTime: "2017-07-02T18:48:58.920638Z", + updateUser: { + name: "Foo Bar", + email: "foobar@gmail.com", + }, + description: "test description", + updateOrigin: "CONSOLE", + updateType: "INCREMENTAL_UPDATE", + }; + } + + describe("#onUpdate", () => { + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should have the correct trigger", () => { + const cloudFunction = remoteConfig.onUpdate(() => null); + + expect(cloudFunction.__trigger).to.deep.equal({ + eventTrigger: { + resource: "projects/project1", + eventType: "google.firebase.remoteconfig.update", + service: "firebaseremoteconfig.googleapis.com", + }, + }); + + expect(cloudFunction.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventType: "google.firebase.remoteconfig.update", + eventFilters: { + resource: "projects/project1", + }, + retry: false, + }, + labels: {}, + }); + }); + + it("should allow both region and runtime options to be set", () => { + const cloudFunction = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .remoteConfig.onUpdate(() => null); + + expect(cloudFunction.__trigger.regions).to.deep.equal(["us-east1"]); + expect(cloudFunction.__trigger.availableMemoryMb).to.deep.equal(256); + expect(cloudFunction.__trigger.timeout).to.deep.equal("90s"); + + expect(cloudFunction.__endpoint.region).to.deep.equal(["us-east1"]); + expect(cloudFunction.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(cloudFunction.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + }); + + describe("unwraps TemplateVersion", () => { + let cloudFunctionUpdate: CloudFunction<remoteConfig.TemplateVersion>; + let event: Event; + + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + cloudFunctionUpdate = remoteConfig.onUpdate( + (version: remoteConfig.TemplateVersion) => version + ); + + event = { + data: constructVersion(), + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "google.firebase.remoteconfig.update", + resource: { + service: "firebaseremoteconfig.googleapis.com", + name: "projects/project1", + }, + }, + }; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should unwrap the version in the event", async () => { + let hello; + functions.onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await cloudFunctionUpdate(event.data, event.context).then((data: any) => { + expect(data).to.deep.equal(constructVersion()); + }); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v1/providers/storage.spec.ts b/spec/v1/providers/storage.spec.ts new file mode 100644 index 000000000..77f8610fc --- /dev/null +++ b/spec/v1/providers/storage.spec.ts @@ -0,0 +1,394 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { Event } from "../../../src/v1"; +import * as config from "../../../src/common/config"; +import * as functions from "../../../src/v1"; +import * as storage from "../../../src/v1/providers/storage"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("Storage Functions", () => { + describe("ObjectBuilder", () => { + function expectedTrigger(bucket: string, eventType: string) { + return { + eventTrigger: { + resource: `projects/_/buckets/${bucket}`, + eventType: `google.storage.object.${eventType}`, + service: "storage.googleapis.com", + }, + }; + } + + function expectedEndpoint(bucket: string, eventType: string) { + return { + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventFilters: { + resource: `projects/_/buckets/${bucket}`, + }, + eventType: `google.storage.object.${eventType}`, + retry: false, + }, + labels: {}, + }; + } + + const defaultBucket = "bucket"; + + before(() => { + config.resetCache({ + storageBucket: defaultBucket, + }); + }); + + after(() => { + config.resetCache(); + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .storage.object() + .onArchive(() => null); + + expect(fn.__trigger.regions).to.deep.equal(["us-east1"]); + expect(fn.__trigger.availableMemoryMb).to.deep.equal(256); + expect(fn.__trigger.timeout).to.deep.equal("90s"); + + expect(fn.__endpoint.region).to.deep.equal(["us-east1"]); + expect(fn.__endpoint.availableMemoryMb).to.deep.equal(256); + expect(fn.__endpoint.timeoutSeconds).to.deep.equal(90); + }); + + describe("#onArchive", () => { + it("should return a TriggerDefinition with appropriate values", () => { + const cloudFunction = storage + .bucket("bucky") + .object() + .onArchive(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger("bucky", "archive")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint("bucky", "archive")); + }); + + it("should use the default bucket when none is provided", () => { + const cloudFunction = storage.object().onArchive(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(defaultBucket, "archive")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(defaultBucket, "archive")); + }); + + it("should allow fully qualified bucket names", () => { + const subjectQualified = new storage.ObjectBuilder(() => "projects/_/buckets/bucky", {}); + const result = subjectQualified.onArchive(() => null); + + expect(result.__trigger).to.deep.equal(expectedTrigger("bucky", "archive")); + + 
expect(result.__endpoint).to.deep.equal(expectedEndpoint("bucky", "archive")); + }); + + it("should throw with improperly formatted buckets", () => { + expect( + () => + storage + .bucket("bad/bucket/format") + .object() + .onArchive(() => null).__trigger + ).to.throw(Error); + + expect( + () => + storage + .bucket("bad/bucket/format") + .object() + .onArchive(() => null).__endpoint + ).to.throw(Error); + }); + + it("should not mess with media links using non-literal slashes", () => { + const cloudFunction = storage.object().onArchive((data) => { + return data.mediaLink; + }); + const goodMediaLinkEvent: Event = { + data: { + mediaLink: + "https://www.googleapis.com/storage/v1/b/mybucket.appspot.com" + + "/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media", + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "google.storage.object.archive", + resource: { + service: "storage.googleapis.com", + name: "projects/_/buckets/bucky", + }, + }, + }; + return cloudFunction(goodMediaLinkEvent.data, goodMediaLinkEvent.context).then( + (result: any) => { + expect(result).equals(goodMediaLinkEvent.data.mediaLink); + } + ); + }); + }); + + describe("#onDelete", () => { + it("should return a TriggerDefinition with appropriate values", () => { + const cloudFunction = storage + .bucket("bucky") + .object() + .onDelete(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger("bucky", "delete")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint("bucky", "delete")); + }); + + it("should use the default bucket when none is provided", () => { + const cloudFunction = storage.object().onDelete(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(defaultBucket, "delete")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(defaultBucket, "delete")); + }); + + it("should allow fully qualified bucket names", () => { + const subjectQualified = new storage.ObjectBuilder(() => "projects/_/buckets/bucky", {}); + const result = subjectQualified.onDelete(() => null); + + expect(result.__trigger).to.deep.equal(expectedTrigger("bucky", "delete")); + + expect(result.__endpoint).to.deep.equal(expectedEndpoint("bucky", "delete")); + }); + + it("should throw with improperly formatted buckets", () => { + const fn = storage + .bucket("bad/bucket/format") + .object() + .onDelete(() => null); + + expect(() => fn.__trigger).to.throw(Error); + + expect(() => fn.__endpoint).to.throw(Error); + }); + + it("should not mess with media links using non-literal slashes", () => { + const cloudFunction = storage.object().onDelete((data) => { + return data.mediaLink; + }); + const goodMediaLinkEvent = { + data: { + mediaLink: + "https://www.googleapis.com/storage/v1/b/mybucket.appspot.com" + + "/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media", + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "google.storage.object.delete", + resource: { + service: "storage.googleapis.com", + name: "projects/_/buckets/bucky", + }, + }, + }; + return cloudFunction(goodMediaLinkEvent.data, goodMediaLinkEvent.context).then( + (result: any) => { + expect(result).equals(goodMediaLinkEvent.data.mediaLink); + } + ); + }); + }); + + describe("#onFinalize", () => { + it("should return a TriggerDefinition with appropriate values", () => { + const cloudFunction = storage + .bucket("bucky") + .object() + .onFinalize(() => null); + + 
expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger("bucky", "finalize")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint("bucky", "finalize")); + }); + + it("should use the default bucket when none is provided", () => { + const cloudFunction = storage.object().onFinalize(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger(defaultBucket, "finalize")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint(defaultBucket, "finalize")); + }); + + it("should allow fully qualified bucket names", () => { + const subjectQualified = new storage.ObjectBuilder(() => "projects/_/buckets/bucky", {}); + const result = subjectQualified.onFinalize(() => null); + + expect(result.__trigger).to.deep.equal(expectedTrigger("bucky", "finalize")); + + expect(result.__endpoint).to.deep.equal(expectedEndpoint("bucky", "finalize")); + }); + + it("should throw with improperly formatted buckets", () => { + const fn = storage + .bucket("bad/bucket/format") + .object() + .onFinalize(() => null); + + expect(() => fn.__trigger).to.throw(Error); + + expect(() => fn.__endpoint).to.throw(Error); + }); + + it("should not mess with media links using non-literal slashes", () => { + const cloudFunction = storage.object().onFinalize((data) => { + return data.mediaLink; + }); + const goodMediaLinkEvent = { + data: { + mediaLink: + "https://www.googleapis.com/storage/v1/b/mybucket.appspot.com" + + "/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media", + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "google.storage.object.finalize", + resource: { + service: "storage.googleapis.com", + name: "projects/_/buckets/bucky", + }, + }, + }; + return cloudFunction(goodMediaLinkEvent.data, goodMediaLinkEvent.context).then( + (result: any) => { + expect(result).equals(goodMediaLinkEvent.data.mediaLink); + } + ); + }); + }); + + describe("#onMetadataUpdate", () => { + it("should return a TriggerDefinition with appropriate values", () => { + const cloudFunction = storage + .bucket("bucky") + .object() + .onMetadataUpdate(() => null); + + expect(cloudFunction.__trigger).to.deep.equal(expectedTrigger("bucky", "metadataUpdate")); + + expect(cloudFunction.__endpoint).to.deep.equal(expectedEndpoint("bucky", "metadataUpdate")); + }); + + it("should use the default bucket when none is provided", () => { + const cloudFunction = storage.object().onMetadataUpdate(() => null); + + expect(cloudFunction.__trigger).to.deep.equal( + expectedTrigger(defaultBucket, "metadataUpdate") + ); + + expect(cloudFunction.__endpoint).to.deep.equal( + expectedEndpoint(defaultBucket, "metadataUpdate") + ); + }); + + it("should allow fully qualified bucket names", () => { + const subjectQualified = new storage.ObjectBuilder(() => "projects/_/buckets/bucky", {}); + const result = subjectQualified.onMetadataUpdate(() => null); + + expect(result.__trigger).to.deep.equal(expectedTrigger("bucky", "metadataUpdate")); + + expect(result.__endpoint).to.deep.equal(expectedEndpoint("bucky", "metadataUpdate")); + }); + + it("should throw with improperly formatted buckets", () => { + const fn = storage + .bucket("bad/bucket/format") + .object() + .onMetadataUpdate(() => null); + + expect(() => fn.__trigger).to.throw(Error); + expect(() => fn.__endpoint).to.throw(Error); + }); + + it("should not mess with media links using non-literal slashes", () => { + const cloudFunction = storage.object().onMetadataUpdate((data) => { + return 
data.mediaLink; + }); + const goodMediaLinkEvent = { + data: { + mediaLink: + "https://www.googleapis.com/storage/v1/b/mybucket.appspot.com" + + "/o/nestedfolder%2Fanotherfolder%2Fmyobject.file?generation=12345&alt=media", + }, + context: { + eventId: "70172329041928", + timestamp: "2018-04-09T07:56:12.975Z", + eventType: "google.storage.object.metadataUpdate", + resource: { + service: "storage.googleapis.com", + name: "projects/_/buckets/bucky", + }, + }, + }; + return cloudFunction(goodMediaLinkEvent.data, goodMediaLinkEvent.context).then( + (result: any) => { + expect(result).equals(goodMediaLinkEvent.data.mediaLink); + } + ); + }); + }); + }); + + describe("process.env.FIREBASE_CONFIG not set", () => { + beforeEach(() => { + (config as any).firebaseConfigCache = null; + delete process.env.FIREBASE_CONFIG; + }); + + it("should not throw if __trigger is not accessed", () => { + expect(() => storage.object().onArchive(() => null)).to.not.throw(Error); + }); + + it("should throw when trigger is accessed", () => { + expect(() => storage.object().onArchive(() => null).__trigger).to.throw(Error); + }); + + it("should throw when endpoint is accessed", () => { + expect(() => storage.object().onArchive(() => null).__endpoint).to.throw(Error); + }); + + it("should not throw when #run is called", () => { + const cf = storage.object().onArchive(() => null); + expect(cf.run).to.not.throw(Error); + }); + }); +}); diff --git a/spec/v1/providers/tasks.spec.ts b/spec/v1/providers/tasks.spec.ts new file mode 100644 index 000000000..c6c2eca9d --- /dev/null +++ b/spec/v1/providers/tasks.spec.ts @@ -0,0 +1,204 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; + +import * as functions from "../../../src/v1"; +import { taskQueue } from "../../../src/v1/providers/tasks"; +import { MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; +import { MINIMIAL_TASK_QUEUE_TRIGGER } from "./fixtures"; +import { runWith } from "../../../src/v1"; + +describe("#onDispatch", () => { + it("should return a trigger/endpoint with appropriate values", () => { + const result = taskQueue({ + rateLimits: { + maxConcurrentDispatches: 30, + maxDispatchesPerSecond: 40, + }, + retryConfig: { + maxAttempts: 5, + maxRetrySeconds: 10, + maxBackoffSeconds: 20, + maxDoublings: 3, + minBackoffSeconds: 5, + }, + invoker: "private", + }).onDispatch(() => undefined); + + expect(result.__trigger).to.deep.equal({ + taskQueueTrigger: { + rateLimits: { + maxConcurrentDispatches: 30, + maxDispatchesPerSecond: 40, + }, + retryConfig: { + maxAttempts: 5, + maxRetrySeconds: 10, + maxBackoffSeconds: 20, + maxDoublings: 3, + minBackoffSeconds: 5, + }, + invoker: ["private"], + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + taskQueueTrigger: { + ...MINIMIAL_TASK_QUEUE_TRIGGER, + rateLimits: { + maxConcurrentDispatches: 30, + maxDispatchesPerSecond: 40, + }, + retryConfig: { + maxAttempts: 5, + maxRetrySeconds: 10, + maxBackoffSeconds: 20, + maxDoublings: 3, + minBackoffSeconds: 5, + }, + invoker: ["private"], + }, + }); + }); + + it("should return an endpoint with appropriate values with preserveExternalChanges set", () => { + const result = runWith({ preserveExternalChanges: true }) + .tasks.taskQueue({ + rateLimits: { + maxConcurrentDispatches: 30, + }, + retryConfig: { + maxAttempts: 5, + maxRetrySeconds: 10, + }, + invoker: "private", + }) + .onDispatch(() => undefined); + + expect(result.__endpoint).to.deep.equal({ + platform: "gcfv1", + taskQueueTrigger: { + rateLimits: { + maxConcurrentDispatches: 30, + }, + retryConfig: { + maxAttempts: 5, + maxRetrySeconds: 10, + }, + invoker: ["private"], + }, + }); + }); + + it("should allow both region and runtime options to be set", () => { + const fn = functions + .region("us-east1") + .runWith({ + timeoutSeconds: 90, + memory: "256MB", + }) + .tasks.taskQueue({ retryConfig: { maxAttempts: 5 } }) + .onDispatch(() => null); + + expect(fn.__trigger).to.deep.equal({ + regions: ["us-east1"], + availableMemoryMb: 256, + timeout: "90s", + taskQueueTrigger: { + retryConfig: { + maxAttempts: 5, + }, + }, + }); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + region: ["us-east1"], + availableMemoryMb: 256, + timeoutSeconds: 90, + taskQueueTrigger: { + ...MINIMIAL_TASK_QUEUE_TRIGGER, + retryConfig: { + maxAttempts: 5, + maxBackoffSeconds: functions.RESET_VALUE, + maxDoublings: functions.RESET_VALUE, + maxRetrySeconds: functions.RESET_VALUE, + minBackoffSeconds: functions.RESET_VALUE, + }, + }, + }); + }); + + it("has a .run method", async () => { + const data = "data"; + const context = { + auth: { + uid: "abc", + token: "token" as any, + rawToken: "abc123", + }, + queueName: "fn", + id: "task0", + retryCount: 0, + executionCount: 0, + scheduledTime: "timestamp", + }; + let done = false; + const cf = taskQueue().onDispatch((d, c) => { + expect(d).to.equal(data); + expect(c).to.deep.equal(context); + done = true; + }); + + await cf.run(data, context); + expect(done).to.be.true; + }); + + // Regression test for firebase-functions#947 
+ it("should lock to the v1 API even with function.length == 1", async () => { + let gotData: Record; + const func = taskQueue().onDispatch((data) => { + gotData = data; + }); + + const req = new MockRequest( + { + data: { foo: "bar" }, + }, + { + "content-type": "application/json", + authorization: "Bearer abc", + } + ); + req.method = "POST"; + + const response = await runHandler(func, req as any); + expect(response.status).to.equal(204); + expect(gotData).to.deep.equal({ foo: "bar" }); + }); +}); diff --git a/spec/v1/providers/testLab.spec.ts b/spec/v1/providers/testLab.spec.ts new file mode 100644 index 000000000..ba8bfc27a --- /dev/null +++ b/spec/v1/providers/testLab.spec.ts @@ -0,0 +1,266 @@ +// The MIT License (MIT) +// +// Copyright (c) 2019 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; + +import * as testLab from "../../../src/v1/providers/testLab"; +import { MINIMAL_V1_ENDPOINT } from "../../fixtures"; + +describe("Test Lab Functions", () => { + describe("#onComplete", () => { + describe("with process.env.GCLOUD_PROJECT set", () => { + before(() => { + process.env.GCLOUD_PROJECT = "project1"; + }); + + after(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should return a trigger/endpoint with appropriate values", () => { + const func = testLab.testMatrix().onComplete(() => null); + + expect(func.__trigger).to.deep.equal({ + eventTrigger: { + service: "testing.googleapis.com", + eventType: "google.testing.testMatrix.complete", + resource: "projects/project1/testMatrices/{matrix}", + }, + }); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V1_ENDPOINT, + platform: "gcfv1", + eventTrigger: { + eventType: "google.testing.testMatrix.complete", + eventFilters: { + resource: "projects/project1/testMatrices/{matrix}", + }, + retry: false, + }, + labels: {}, + }); + }); + + it('should parse TestMatrix in "INVALID" state', () => { + const event = { + data: { + clientInfo: { + name: "test", + }, + invalidMatrixDetails: "INVALID_INPUT_APK", + resultStorage: { + googleCloudStorage: { + gcsPath: "gs://test.appspot.com", + }, + }, + state: "INVALID", + testMatrixId: "matrix-375mfeu9mnw8t", + timestamp: "2019-04-15T17:43:32.538Z", + }, + context: { + resource: {}, + }, + }; + const expected = { + testMatrixId: "matrix-375mfeu9mnw8t", + state: "INVALID", + createTime: "2019-04-15T17:43:32.538Z", + outcomeSummary: undefined, + invalidMatrixDetails: "INVALID_INPUT_APK", + resultStorage: { + gcsPath: "gs://test.appspot.com", + resultsUrl: undefined, + toolResultsHistoryId: undefined, + toolResultsExecutionId: undefined, + } as testLab.ResultStorage, + clientInfo: { + name: "test", + details: {}, + } as testLab.ClientInfo, + } as testLab.TestMatrix; + const func = testLab.testMatrix().onComplete((matrix) => matrix); + return expect(func(event.data, event.context)).to.eventually.deep.equal(expected); + }); + + it('should parse TestMatrix in "FINISHED" state', () => { + const event = { + data: { + clientInfo: { + name: "test", + }, + outcomeSummary: "FAILURE", + resultStorage: { + googleCloudStorage: { + gcsPath: "gs://test.appspot.com", + }, + toolResultsExecution: { + executionId: "6352915701487950333", + historyId: "bh.9b6f4dac24d3049", + projectId: "test", + }, + toolResultsHistory: { + historyId: "bh.9b6f4dac24d3049", + projectId: "test", + }, + resultsUrl: "https://path/to/results", + }, + state: "FINISHED", + testMatrixId: "matrix-tsgjk8pnvxhya", + timestamp: "2019-04-15T18:03:11.115Z", + }, + context: { + resource: {}, + }, + }; + const expected = { + testMatrixId: "matrix-tsgjk8pnvxhya", + state: "FINISHED", + createTime: "2019-04-15T18:03:11.115Z", + outcomeSummary: "FAILURE", + invalidMatrixDetails: undefined, + resultStorage: { + gcsPath: "gs://test.appspot.com", + toolResultsHistoryId: "bh.9b6f4dac24d3049", + toolResultsExecutionId: "6352915701487950333", + resultsUrl: "https://path/to/results", + } as testLab.ResultStorage, + clientInfo: { + name: "test", + details: {}, + } as testLab.ClientInfo, + } as testLab.TestMatrix; + const func = testLab.testMatrix().onComplete((matrix) => matrix); + return expect(func(event.data, event.context)).to.eventually.deep.equal(expected); + }); + }); + + describe("process.env.GCLOUD_PROJECT not set", () => { + it("should not throw if trigger is not accessed", () => { + expect(() => 
testLab.testMatrix().onComplete(() => null)).to.not.throw(Error); + }); + + it("should throw when trigger is accessed", () => { + expect(() => testLab.testMatrix().onComplete(() => null).__trigger).to.throw(Error); + }); + + it("should throw when endpoint is accessed", () => { + expect(() => testLab.testMatrix().onComplete(() => null).__endpoint).to.throw(Error); + }); + }); + }); + + describe("TestMatrix", () => { + describe("constructor", () => { + it("should populate basic fields", () => { + const expected = { + testMatrixId: "id1", + createTime: "2019-02-08T18:50:32.178Z", + state: "FINISHED", + outcomeSummary: "SUCCESS", + invalidMatrixDetails: "DETAILS_UNAVAILABLE", + resultStorage: new testLab.ResultStorage(), + clientInfo: new testLab.ClientInfo(), + } as testLab.TestMatrix; + const actual = new testLab.TestMatrix({ + testMatrixId: "id1", + timestamp: "2019-02-08T18:50:32.178Z", + state: "FINISHED", + outcomeSummary: "SUCCESS", + invalidMatrixDetails: "DETAILS_UNAVAILABLE", + }); + expect(actual).to.deep.equal(expected); + }); + }); + }); + + describe("ClientInfo", () => { + describe("constructor", () => { + it("should populate basic fields", () => { + const expected = { + name: "client", + details: {}, + } as testLab.ClientInfo; + const actual = new testLab.ClientInfo({ + name: "client", + }); + expect(actual).to.deep.equal(expected); + }); + + it("should populate key/value details", () => { + const expected = { + name: "client", + details: { + k0: "v0", + k1: "", + }, + } as testLab.ClientInfo; + const actual = new testLab.ClientInfo({ + name: "client", + clientInfoDetails: [ + { + key: "k0", + value: "v0", + }, + { + key: "k1", + }, + ], + }); + expect(actual).to.deep.equal(expected); + }); + }); + }); + + describe("ResultStorage", () => { + describe("constructor", () => { + it("should populate basic fields", () => { + const expected = { + gcsPath: "path", + toolResultsHistoryId: "h1", + toolResultsExecutionId: "e2", + resultsUrl: "http://example.com/", + } as testLab.ResultStorage; + const actual = new testLab.ResultStorage({ + googleCloudStorage: { + gcsPath: "path", + }, + toolResultsHistory: { + projectId: "p1", + historyId: "h1", + }, + toolResultsExecution: { + projectId: "p2", + historyId: "h2", + executionId: "e2", + }, + resultsUrl: "http://example.com/", + }); + expect(actual).to.deep.equal(expected); + }); + + it("should not throw on unset fields", () => { + expect(() => new testLab.ResultStorage({})).to.not.throw(); + }); + }); + }); +}); diff --git a/spec/v1/utils.spec.ts b/spec/v1/utils.spec.ts new file mode 100644 index 000000000..0f050ad35 --- /dev/null +++ b/spec/v1/utils.spec.ts @@ -0,0 +1,41 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { applyChange } from "../../src/common/utilities/utils"; + +describe("utils", () => { + describe(".applyChange(from: any, to: any): any", () => { + it("should return the to value for non-object values of from and to", () => { + expect(applyChange({ a: "b" }, null)).to.eq(null); + expect(applyChange(null, { a: "b" })).to.deep.equal({ a: "b" }); + expect(applyChange(23, null)).to.be.null; + }); + + it("should return the merged value of two objects", () => { + const from = { a: { b: "foo", c: 23, d: 444 }, d: { e: 42 } }; + const to: any = { a: { b: "bar", c: null }, d: null, e: { f: "g" } }; + const result = { a: { b: "bar", d: 444 }, e: { f: "g" } }; + expect(applyChange(from, to)).to.deep.equal(result); + }); + }); +}); diff --git a/changelog.txt b/spec/v2/params.spec.ts similarity index 100% rename from changelog.txt rename to spec/v2/params.spec.ts diff --git a/spec/v2/providers/alerts/alerts.spec.ts b/spec/v2/providers/alerts/alerts.spec.ts new file mode 100644 index 000000000..9f69f0555 --- /dev/null +++ b/spec/v2/providers/alerts/alerts.spec.ts @@ -0,0 +1,231 @@ +import { expect } from "chai"; +import { CloudEvent, onInit } from "../../../../src/v2"; +import * as options from "../../../../src/v2/options"; +import * as alerts from "../../../../src/v2/providers/alerts"; +import { FULL_OPTIONS } from "../fixtures"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../../fixtures"; + +const ALERT_TYPE = "new-alert-type"; +const APPID = "123456789"; + +const ALERT_EVENT_FILTER = { + alerttype: ALERT_TYPE, +}; + +const ALERT_APP_EVENT_FILTER = { + alerttype: ALERT_TYPE, + appid: APPID, +}; + +describe("alerts", () => { + describe("onAlertPublished", () => { + it("should create the function without opts", () => { + const result = alerts.onAlertPublished(ALERT_TYPE, () => 42); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create the function with opts", () => { + const result = alerts.onAlertPublished( + { + ...FULL_OPTIONS, + alertType: ALERT_TYPE, + appId: APPID, + }, + () => 42 + ); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should have a .run method", () => { + const func = alerts.onAlertPublished(ALERT_TYPE, (event) => event); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + }); + + describe("getEndpointAnnotation", () => { + beforeEach(() => { + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + options.setGlobalOptions({}); + delete process.env.GCLOUD_PROJECT; + }); + + it("should define the endpoint without appId and opts", () => { + expect(alerts.getEndpointAnnotation({}, ALERT_TYPE)).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should define a complex endpoint without appId", () => { + 
expect(alerts.getEndpointAnnotation({ ...FULL_OPTIONS }, ALERT_TYPE)).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should define a complex endpoint", () => { + expect(alerts.getEndpointAnnotation({ ...FULL_OPTIONS }, ALERT_TYPE, APPID)).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should merge global & specific opts", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + }); + const specificOpts = { + region: "us-west1", + minInstances: 3, + }; + + expect(alerts.getEndpointAnnotation(specificOpts, ALERT_TYPE, APPID)).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + concurrency: 20, + region: ["us-west1"], + minInstances: 3, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + }); + + describe("getOptsAndAlertTypeAndApp", () => { + it("should parse a string", () => { + const [opts, alertType, appId] = alerts.getOptsAndAlertTypeAndApp(ALERT_TYPE); + + expect(opts).to.deep.equal({}); + expect(alertType).to.equal(ALERT_TYPE); + expect(appId).to.be.undefined; + }); + + it("should parse an options object without appId", () => { + const myOpts: alerts.FirebaseAlertOptions = { + alertType: ALERT_TYPE, + region: "us-west1", + }; + + const [opts, alertType, appId] = alerts.getOptsAndAlertTypeAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(alertType).to.equal(myOpts.alertType); + expect(appId).to.be.undefined; + }); + + it("should parse an options object with appId", () => { + const myOpts: alerts.FirebaseAlertOptions = { + alertType: ALERT_TYPE, + appId: APPID, + region: "us-west1", + }; + + const [opts, alertType, appId] = alerts.getOptsAndAlertTypeAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(alertType).to.equal(myOpts.alertType); + expect(appId).to.be.equal(myOpts.appId); + }); + }); + + describe("convertAlertAndApp", () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + it("should leave event unchanged if alerttype & appid are missing", () => { + const raw = { ...event }; + + const converted = alerts.convertAlertAndApp(raw); + + expect(raw).to.deep.eq(converted); + }); + + it("should convert alerttype & appid when present", () => { + const raw = { + ...event, + alerttype: "my-alert", + appid: "my-app", + }; + + const converted = alerts.convertAlertAndApp(raw); + + expect(converted).to.deep.eq({ + ...event, + alerttype: "my-alert", + appid: "my-app", + alertType: "my-alert", + appId: "my-app", + }); + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await alerts.onAlertPublished("alert", () => null)(event); + expect(hello).to.equal("world"); + }); +}); diff --git a/spec/v2/providers/alerts/appDistribution.spec.ts b/spec/v2/providers/alerts/appDistribution.spec.ts new file mode 100644 index 000000000..045b84448 --- /dev/null +++ b/spec/v2/providers/alerts/appDistribution.spec.ts @@ -0,0 +1,229 
@@ +import { expect } from "chai"; +import * as alerts from "../../../../src/v2/providers/alerts"; +import * as appDistribution from "../../../../src/v2/providers/alerts/appDistribution"; +import { FULL_OPTIONS } from "../fixtures"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../../fixtures"; +import { onInit } from "../../../../src/v2/core"; + +const APPID = "123456789"; +const myHandler = () => 42; + +const APP_EVENT_FILTER = { + appid: APPID, +}; + +describe("appDistribution", () => { + describe("onNewTesterIosDevicePublished", () => { + it("should create a function with alertType & appId", () => { + const func = appDistribution.onNewTesterIosDevicePublished(APPID, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + ...APP_EVENT_FILTER, + alerttype: appDistribution.newTesterIosDeviceAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = appDistribution.onNewTesterIosDevicePublished({ ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: appDistribution.newTesterIosDeviceAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with appid in opts", () => { + const func = appDistribution.onNewTesterIosDevicePublished( + { ...FULL_OPTIONS, appId: APPID }, + myHandler + ); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + ...APP_EVENT_FILTER, + alerttype: appDistribution.newTesterIosDeviceAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function without opts or appId", () => { + const func = appDistribution.onNewTesterIosDevicePublished(myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: appDistribution.newTesterIosDeviceAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with a run method", () => { + const func = appDistribution.onNewTesterIosDevicePublished(APPID, (event) => event); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + + it("calls init function", async () => { + const func = appDistribution.onNewTesterIosDevicePublished(APPID, (event) => event); + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await func({ data: "test" } as any); + expect(hello).to.equal("world"); + }); + }); + + describe("onInAppfeedbackPublished", () => { + it("should create a function with alertType & appId", () => { + const func = appDistribution.onInAppFeedbackPublished(APPID, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + ...APP_EVENT_FILTER, + alerttype: appDistribution.inAppFeedbackAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = appDistribution.onInAppFeedbackPublished({ ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: 
appDistribution.inAppFeedbackAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with appid in opts", () => { + const func = appDistribution.onInAppFeedbackPublished( + { ...FULL_OPTIONS, appId: APPID }, + myHandler + ); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + ...APP_EVENT_FILTER, + alerttype: appDistribution.inAppFeedbackAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function without opts or appId", () => { + const func = appDistribution.onInAppFeedbackPublished(myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: appDistribution.inAppFeedbackAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with a run method", () => { + const func = appDistribution.onInAppFeedbackPublished(APPID, (event) => event); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + + it("calls init function", async () => { + const func = appDistribution.onInAppFeedbackPublished(APPID, (event) => event); + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await func({ data: "test" } as any); + expect(hello).to.equal("world"); + }); + }); + + describe("getOptsAndApp", () => { + it("should parse a string", () => { + const [opts, appId] = appDistribution.getOptsAndApp(APPID); + + expect(opts).to.deep.equal({}); + expect(appId).to.equal(APPID); + }); + + it("should parse an options object without appId", () => { + const myOpts: appDistribution.AppDistributionOptions = { + region: "us-west1", + }; + + const [opts, appId] = appDistribution.getOptsAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(appId).to.be.undefined; + }); + + it("should parse an options object with appId", () => { + const myOpts: appDistribution.AppDistributionOptions = { + appId: APPID, + region: "us-west1", + }; + + const [opts, appId] = appDistribution.getOptsAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(appId).to.equal(APPID); + }); + }); +}); diff --git a/spec/v2/providers/alerts/billing.spec.ts b/spec/v2/providers/alerts/billing.spec.ts new file mode 100644 index 000000000..a0020f83b --- /dev/null +++ b/spec/v2/providers/alerts/billing.spec.ts @@ -0,0 +1,154 @@ +import { expect } from "chai"; +import * as alerts from "../../../../src/v2/providers/alerts"; +import * as billing from "../../../../src/v2/providers/alerts/billing"; +import { FULL_OPTIONS } from "../fixtures"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../../fixtures"; +import { onInit } from "../../../../src/v2/core"; + +const ALERT_TYPE = "new-alert-type"; +const myHandler = () => 42; + +describe("billing", () => { + describe("onPlanUpdatePublished", () => { + it("should create a function with only handler", () => { + const func = billing.onPlanUpdatePublished(myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: billing.planUpdateAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with opts & handler", () => { + const func = billing.onPlanUpdatePublished({ ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + 
platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: billing.planUpdateAlert, + }, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const func = billing.onPlanAutomatedUpdatePublished((event) => event); + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await func({ data: "test" } as any); + expect(hello).to.equal("world"); + }); + }); + + describe("onPlanAutomatedUpdatePublished", () => { + it("should create a function with only handler", () => { + const func = billing.onPlanAutomatedUpdatePublished(myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: billing.planAutomatedUpdateAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with opts & handler", () => { + const func = billing.onPlanAutomatedUpdatePublished({ ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: billing.planAutomatedUpdateAlert, + }, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const func = billing.onPlanAutomatedUpdatePublished((event) => event); + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await func({ data: "test" } as any); + expect(hello).to.equal("world"); + }); + }); + + describe("onOperation", () => { + it("should create a function with alertType only", () => { + const func = billing.onOperation(ALERT_TYPE, myHandler, undefined); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: ALERT_TYPE, + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = billing.onOperation(ALERT_TYPE, { ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: ALERT_TYPE, + }, + retry: false, + }, + }); + }); + + it("should create a function with a run method", () => { + const func = billing.onOperation(ALERT_TYPE, (event) => event, undefined); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + + it("calls init function", async () => { + const func = billing.onOperation(ALERT_TYPE, (event) => event, undefined); + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await func({ data: "test" } as any); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v2/providers/alerts/crashlytics.spec.ts b/spec/v2/providers/alerts/crashlytics.spec.ts new file mode 100644 index 000000000..496f6f10c --- /dev/null +++ b/spec/v2/providers/alerts/crashlytics.spec.ts @@ -0,0 +1,235 @@ +import { expect } from "chai"; +import * as alerts from "../../../../src/v2/providers/alerts"; +import * as crashlytics from "../../../../src/v2/providers/alerts/crashlytics"; +import { FULL_OPTIONS } from "../fixtures"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../../fixtures"; +import { onInit } from "../../../../src/v2/core"; + +const ALERT_TYPE = "new-alert-type"; +const APPID = "123456789"; +const myHandler = () => 42; + +describe("crashlytics", () => { + const 
testcases = [ + { + method: "onNewFatalIssuePublished", + event: crashlytics.newFatalIssueAlert, + }, + { + method: "onNewNonfatalIssuePublished", + event: crashlytics.newNonfatalIssueAlert, + }, + { + method: "onRegressionAlertPublished", + event: crashlytics.regressionAlert, + }, + { + method: "onStabilityDigestPublished", + event: crashlytics.stabilityDigestAlert, + }, + { + method: "onVelocityAlertPublished", + event: crashlytics.velocityAlert, + }, + { + method: "onNewAnrIssuePublished", + event: crashlytics.newAnrIssueAlert, + }, + ]; + + for (const { method, event } of testcases) { + const ALERT_EVENT_FILTER = { + alerttype: event, + }; + + const ALERT_APP_EVENT_FILTER = { + ...ALERT_EVENT_FILTER, + appid: APPID, + }; + + describe(method, () => { + it("should create a function only handler", () => { + const func = crashlytics[method](myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with appId", () => { + const func = crashlytics[method](APPID, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with base opts", () => { + const func = crashlytics[method]({ ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = crashlytics[method]({ ...FULL_OPTIONS, appId: APPID }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const func = crashlytics[method](APPID, myHandler); + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await func({ data: "crash" } as any); + expect(hello).to.equal("world"); + }); + }); + } + + const ALERT_EVENT_FILTER = { + alerttype: ALERT_TYPE, + }; + + const ALERT_APP_EVENT_FILTER = { + ...ALERT_EVENT_FILTER, + appid: APPID, + }; + + describe("onOperation", () => { + it("should create a function with alertType only", () => { + const func = crashlytics.onOperation(ALERT_TYPE, myHandler, undefined); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with alertType & appId", () => { + const func = crashlytics.onOperation(ALERT_TYPE, APPID, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with base opts", () => { + const func = crashlytics.onOperation(ALERT_TYPE, { ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + 
eventFilters: ALERT_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with appid in opts", () => { + const func = crashlytics.onOperation( + ALERT_TYPE, + { ...FULL_OPTIONS, appId: APPID }, + myHandler + ); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: ALERT_APP_EVENT_FILTER, + retry: false, + }, + }); + }); + + it("should create a function with a run method", () => { + const func = crashlytics.onOperation(ALERT_TYPE, (event) => event, undefined); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + }); + + describe("getOptsAndApp", () => { + it("should parse a string", () => { + const APPID = "123456789"; + + const [opts, appId] = crashlytics.getOptsAndApp(APPID); + + expect(opts).to.deep.equal({}); + expect(appId).to.equal(APPID); + }); + + it("should parse an options object without appId", () => { + const myOpts: crashlytics.CrashlyticsOptions = { + region: "us-west1", + }; + + const [opts, appId] = crashlytics.getOptsAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(appId).to.be.undefined; + }); + + it("should parse an options object with appId", () => { + const myOpts: crashlytics.CrashlyticsOptions = { + appId: "123456789", + region: "us-west1", + }; + + const [opts, appId] = crashlytics.getOptsAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(appId).to.equal(myOpts.appId); + }); + }); +}); diff --git a/spec/v2/providers/alerts/performance.spec.ts b/spec/v2/providers/alerts/performance.spec.ts new file mode 100644 index 000000000..01004e3f6 --- /dev/null +++ b/spec/v2/providers/alerts/performance.spec.ts @@ -0,0 +1,185 @@ +import { expect } from "chai"; +import * as alerts from "../../../../src/v2/providers/alerts"; +import * as performance from "../../../../src/v2/providers/alerts/performance"; +import { FULL_OPTIONS } from "../fixtures"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../../fixtures"; +import { CloudEvent, onInit } from "../../../../src/v2/core"; + +const APPID = "123456789"; +const myHandler = () => 42; + +const APP_EVENT_FILTER = { + appid: APPID, +}; + +describe("performance", () => { + describe("onThresholdAlertPublished", () => { + it("should create a function with alertType & appId", () => { + const func = performance.onThresholdAlertPublished(APPID, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + ...APP_EVENT_FILTER, + alerttype: performance.thresholdAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = performance.onThresholdAlertPublished({ ...FULL_OPTIONS }, myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: performance.thresholdAlert, + }, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await performance.onThresholdAlertPublished(() => null)(event); + expect(hello).to.equal("world"); + }); + + it("should create a function with appid in opts", () => { + 
const func = performance.onThresholdAlertPublished( + { ...FULL_OPTIONS, appId: APPID }, + myHandler + ); + + expect(func.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + ...APP_EVENT_FILTER, + alerttype: performance.thresholdAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function without opts or appId", () => { + const func = performance.onThresholdAlertPublished(myHandler); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: alerts.eventType, + eventFilters: { + alerttype: performance.thresholdAlert, + }, + retry: false, + }, + }); + }); + + it("should create a function with a run method", () => { + const func = performance.onThresholdAlertPublished(APPID, (event) => event); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + }); + + describe("getOptsAndApp", () => { + it("should parse a string", () => { + const [opts, appId] = performance.getOptsAndApp(APPID); + + expect(opts).to.deep.equal({}); + expect(appId).to.equal(APPID); + }); + + it("should parse an options object without appId", () => { + const myOpts: performance.PerformanceOptions = { + region: "us-west1", + }; + + const [opts, appId] = performance.getOptsAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(appId).to.be.undefined; + }); + + it("should parse an options object with appId", () => { + const myOpts: performance.PerformanceOptions = { + appId: APPID, + region: "us-west1", + }; + + const [opts, appId] = performance.getOptsAndApp(myOpts); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(appId).to.equal(APPID); + }); + }); + + describe("convertPayload", () => { + it("should return the same payload", () => { + const payload = { + a: "b", + conditionPercentile: 23, + appVersion: "3", + }; + + const convertedPayload = performance.convertPayload(payload as any); + + expect(convertedPayload).to.deep.eq(payload); + }); + + it("should return the same payload if the fields are undefined", () => { + const payload = { + a: "b", + }; + + const convertedPayload = performance.convertPayload(payload as any); + + expect(convertedPayload).to.deep.eq({ + a: "b", + }); + }); + + it("should remove fields", () => { + const payload = { + a: "b", + conditionPercentile: 0, + appVersion: "", + }; + + const convertedPayload = performance.convertPayload(payload as any); + + expect(convertedPayload).to.deep.eq({ + a: "b", + }); + }); + }); +}); diff --git a/spec/v2/providers/database.spec.ts b/spec/v2/providers/database.spec.ts new file mode 100644 index 000000000..9eabf61ca --- /dev/null +++ b/spec/v2/providers/database.spec.ts @@ -0,0 +1,690 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import { PathPattern } from "../../../src/common/utilities/path-pattern"; +import * as database from "../../../src/v2/providers/database"; +import { expectType } from "../../common/metaprogramming"; +import { MINIMAL_V2_ENDPOINT } from "../../fixtures"; +import { CloudEvent, onInit } from "../../../src/v2/core"; + +const RAW_RTDB_EVENT: database.RawRTDBCloudEvent = { + data: { + ["@type"]: "type.googleapis.com/google.events.firebase.database.v1.ReferenceEventData", + data: {}, + delta: {}, + }, + firebasedatabasehost: "firebaseio.com", + instance: "my-instance", + ref: "foo/bar", + location: "us-central1", + id: "id", + source: "source", + specversion: "1.0", + time: "time", + type: "type", +}; + +describe("database", () => { + describe("makeParams", () => { + it("should make params with basic path", () => { + const event: database.RawRTDBCloudEvent = { + ...RAW_RTDB_EVENT, + ref: "match_a/something/else/nothing/end/match_b", + }; + + expect( + database.makeParams( + event, + new PathPattern("{a}/something/else/*/end/{b}"), + new PathPattern("*") + ) + ).to.deep.equal({ + a: "match_a", + b: "match_b", + }); + }); + + it("should make params with multi segment path", () => { + const event: database.RawRTDBCloudEvent = { + ...RAW_RTDB_EVENT, + ref: "something/is/a/thing/else/match_a/hello/match_b/world", + }; + + expect( + database.makeParams( + event, + new PathPattern("something/**/else/{a}/hello/{b}/world"), + new PathPattern("*") + ) + ).to.deep.equal({ + a: "match_a", + b: "match_b", + }); + }); + + it("should make params with multi segment path capture", () => { + const event: database.RawRTDBCloudEvent = { + ...RAW_RTDB_EVENT, + ref: "something/is/a/thing/else/match_a/hello/match_b/world", + }; + + expect( + database.makeParams( + event, + new PathPattern("something/{path=**}/else/{a}/hello/{b}/world"), + new PathPattern("*") + ) + ).to.deep.equal({ + path: "is/a/thing", + a: "match_a", + b: "match_b", + }); + }); + + it("should make params for a full path and instance", () => { + const event: database.RawRTDBCloudEvent = { + ...RAW_RTDB_EVENT, + ref: "something/is/a/thing/else/match_a/hello/match_b/world", + }; + + expect( + database.makeParams( + event, + new PathPattern("something/{path=**}/else/{a}/hello/{b}/world"), + new PathPattern("*") + ) + ).to.deep.equal({ + path: "is/a/thing", + a: "match_a", + b: "match_b", + }); + }); + }); + + describe("getOpts", () => { + it("should return opts when passed in a path", () => { + expect(database.getOpts("/foo/{bar}/")).to.deep.equal({ + path: "foo/{bar}", + instance: "*", + opts: {}, + }); + }); + + it("should return opts when passed in an options object", () => { + expect( + database.getOpts({ + ref: "/foo/{bar}/", + region: "us-central1", + }) + ).to.deep.equal({ + path: "foo/{bar}", + instance: "*", + opts: { + region: "us-central1", + }, + }); + }); + }); + + describe("makeEndpoint", () => { + it("should create an endpoint with an instance wildcard", () => { + const ep = database.makeEndpoint( + 
database.writtenEventType, + { + region: "us-central1", + labels: { 1: "2" }, + }, + new PathPattern("foo/bar"), + new PathPattern("*") + ); + + expect(ep).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: { + 1: "2", + }, + region: ["us-central1"], + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/bar", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create an endpoint without an instance wildcard", () => { + const ep = database.makeEndpoint( + database.writtenEventType, + { + region: "us-central1", + labels: { 1: "2" }, + }, + new PathPattern("foo/bar"), + new PathPattern("my-instance") + ); + + expect(ep).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: { + 1: "2", + }, + region: ["us-central1"], + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/bar", + }, + retry: false, + }, + }); + }); + }); + + describe("onChangedOperation", () => { + it("should create a function for a written event", () => { + const func = database.onChangedOperation(database.writtenEventType, "/foo/{bar}/", () => 2); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a function for a updated event", () => { + const func = database.onChangedOperation(database.updatedEventType, "/foo/{bar}/", () => 2); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.updatedEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a complex function", () => { + const func = database.onChangedOperation( + database.writtenEventType, + { + ref: "/foo/{path=**}/{bar}/", + instance: "my-instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + }, + () => 2 + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: false, + }, + }); + }); + + it("should supply retry", () => { + const func = database.onChangedOperation( + database.writtenEventType, + { + ref: "/foo/{path=**}/{bar}/", + instance: "my-instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + retry: true, + }, + () => 2 + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: true, + }, + }); + }); + }); + + describe("onOperation", () => { + it("should create a function for a created event", () => { + const func = database.onOperation(database.createdEventType, "/foo/{bar}/", () => 2); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + 
labels: {}, + eventTrigger: { + eventType: database.createdEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a function for a deleted event", () => { + const func = database.onOperation(database.deletedEventType, "/foo/{bar}/", () => 2); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.deletedEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a complex function", () => { + const func = database.onOperation( + database.createdEventType, + { + ref: "/foo/{path=**}/{bar}/", + instance: "my-instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + }, + () => 2 + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.createdEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: false, + }, + }); + }); + }); + + describe("onValueWritten", () => { + it("should create a function with a reference", () => { + const func = database.onValueWritten("/foo/{bar}/", (event) => { + expectType<{ bar: string }>(event.params); + }); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = database.onValueWritten( + { + ref: "/foo/{path=**}/{bar}/", + instance: "my-instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + }, + (event) => { + expectType<{ path: string; bar: string }>(event.params); + } + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.writtenEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await database.onValueWritten("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onValueCreated", () => { + it("should create a function with a reference", () => { + const func = database.onValueCreated("/foo/{bar}/", (event) => { + expectType<{ bar: string }>(event.params); + }); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.createdEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = database.onValueCreated( + { + ref: "/foo/{path=**}/{bar}/", + instance: "instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + }, + 
(event) => { + expectType<{ + path: string; + bar: string; + }>(event.params); + } + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.createdEventType, + eventFilters: { + instance: "instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await database.onValueCreated("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onValueUpdated", () => { + it("should create a function with a reference", () => { + const func = database.onValueUpdated("/foo/{bar}/", (event) => { + expectType<{ bar: string }>(event.params); + }); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.updatedEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = database.onValueUpdated( + { + ref: "/foo/{path=**}/{bar}/", + instance: "my-instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + }, + (event) => { + expectType<{ path: string; bar: string }>(event.params); + } + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.updatedEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: false, + }, + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await database.onValueUpdated("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onValueDeleted", () => { + it("should create a function with a reference", () => { + const func = database.onValueDeleted("/foo/{bar}/", (event) => { + expectType<{ bar: string }>(event.params); + }); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: database.deletedEventType, + eventFilters: {}, + eventFilterPathPatterns: { + ref: "foo/{bar}", + instance: "*", + }, + retry: false, + }, + }); + }); + + it("should create a function with opts", () => { + const func = database.onValueDeleted( + { + ref: "/foo/{path=**}/{bar}/", + instance: "my-instance", + region: "us-central1", + cpu: "gcf_gen1", + minInstances: 2, + }, + (event) => { + expectType<{ path: string; bar: string }>(event.params); + } + ); + + expect(func.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + cpu: "gcf_gen1", + minInstances: 2, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: database.deletedEventType, + eventFilters: { + instance: "my-instance", + }, + eventFilterPathPatterns: { + ref: "foo/{path=**}/{bar}", + }, + retry: false, + 
}, + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await database.onValueDeleted("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v2/providers/dataconnect.spec.ts b/spec/v2/providers/dataconnect.spec.ts new file mode 100644 index 000000000..a85e4ee44 --- /dev/null +++ b/spec/v2/providers/dataconnect.spec.ts @@ -0,0 +1,530 @@ +// The MIT License (MIT) +// +// Copyright (c) 2025 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import * as dataconnect from "../../../src/v2/providers/dataconnect"; +import { CloudEvent } from "../../../src/v2"; +import { onInit } from "../../../src/v2/core"; +import { expectExtends } from "../../common/metaprogramming"; + +const expectedEndpointBase = { + platform: "gcfv2", + availableMemoryMb: {}, + concurrency: {}, + ingressSettings: {}, + maxInstances: {}, + minInstances: {}, + serviceAccountEmail: {}, + timeoutSeconds: {}, + vpc: {}, + labels: {}, +}; + +function makeExpectedEndpoint(eventType: string, eventFilters, eventFilterPathPatterns) { + return { + ...expectedEndpointBase, + eventTrigger: { + eventType, + eventFilters, + eventFilterPathPatterns, + retry: false, + }, + }; +} + +describe("dataconnect", () => { + describe("params", () => { + it("extracts {segment} captures", () => { + expectExtends< + Record<"myConnector", string>, + dataconnect.DataConnectParams<"/{myConnector}"> + >(); + }); + + it("extracts nothing from strings without params", () => { + expectExtends, dataconnect.DataConnectParams<"foo/bar">>(); + expectExtends, dataconnect.DataConnectParams<"/foo/bar">>(); + }); + + it("extracts {segment} captures from options", () => { + expectExtends< + Record<"myService", string>, + dataconnect.DataConnectParams<{ + service: "{myService}"; + connector: "connector"; + operation: "operation"; + }> + >(); + + expectExtends< + { myService: string; [key: string]: string }, + dataconnect.DataConnectParams< + dataconnect.OperationOptions<"{myService}", "connector", "operation"> + > + >(); + }); + + it("extracts {segment=*} captures from options", () => { + expectExtends< + Record<"myConnector", string>, + dataconnect.DataConnectParams< + dataconnect.OperationOptions + > + >(); + }); + + it("extracts {segment=**} 
captures from options", () => { + expectExtends< + Record<"myOperation", string>, + dataconnect.DataConnectParams< + dataconnect.OperationOptions + > + >(); + }); + + it("extracts multiple captures from options", () => { + expectExtends< + Record<"myService" | "myConnector" | "myOperation", string>, + dataconnect.DataConnectParams< + dataconnect.OperationOptions<"{myService}", "{myConnector=*}", "{myOperation=**}"> + > + >(); + }); + + it("extracts nothing from options without params", () => { + expectExtends< + Record, + dataconnect.DataConnectParams<{ + service: "service"; + connector: "connector"; + operation: "operation"; + }> + >(); + + expectExtends< + Record, + dataconnect.DataConnectParams> + >(); + }); + }); + + describe("onMutationExecuted", () => { + it("should create a func", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + connector: "my-connector", + operation: "my-operation", + }, + {} + ); + + const func = dataconnect.onMutationExecuted( + "services/my-service/connectors/my-connector/operations/my-operation", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + connector: "my-connector", + operation: "my-operation", + }, + {} + ); + + const func = dataconnect.onMutationExecuted( + { + service: "my-service", + connector: "my-connector", + operation: "my-operation", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with a service path pattern", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + connector: "my-connector", + operation: "my-operation", + }, + { + service: "{service}", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/{service}/connectors/my-connector/operations/my-operation", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with a service path pattern", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + connector: "my-connector", + operation: "my-operation", + }, + { + service: "{service}", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "{service}", + connector: "my-connector", + operation: "my-operation", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with a connector path pattern", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + operation: "my-operation", + }, + { + connector: "{connector}", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/my-service/connectors/{connector}/operations/my-operation", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with a connector path pattern", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + operation: "my-operation", + }, + { + connector: "{connector}", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "my-service", + connector: "{connector}", + operation: "my-operation", + }, + () => true + ); + 
expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with an operation path pattern", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + connector: "my-connector", + }, + { + operation: "{operation}", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/my-service/connectors/my-connector/operations/{operation}", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with an operation path pattern", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + connector: "my-connector", + }, + { + operation: "{operation}", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "my-service", + connector: "my-connector", + operation: "{operation}", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with path patterns", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + {}, + { + service: "{service}", + connector: "{connector}", + operation: "{operation}", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/{service}/connectors/{connector}/operations/{operation}", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with path patterns", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + {}, + { + service: "{service}", + connector: "{connector}", + operation: "{operation}", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "{service}", + connector: "{connector}", + operation: "{operation}", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with a service wildcard", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + connector: "my-connector", + operation: "my-operation", + }, + { + service: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/*/connectors/my-connector/operations/my-operation", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with a service wildcard", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + connector: "my-connector", + operation: "my-operation", + }, + { + service: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "*", + connector: "my-connector", + operation: "my-operation", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with a connector wildcard", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + operation: "my-operation", + }, + { + connector: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/my-service/connectors/*/operations/my-operation", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with a connector wildcard", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + operation: "my-operation", + }, + { + 
connector: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "my-service", + connector: "*", + operation: "my-operation", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with an operation wildcard", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + connector: "my-connector", + }, + { + operation: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/my-service/connectors/my-connector/operations/*", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with an operation wildcard", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + { + service: "my-service", + connector: "my-connector", + }, + { + operation: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "my-service", + connector: "my-connector", + operation: "*", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func with wildcards", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + {}, + { + service: "*", + connector: "*", + operation: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + "services/*/connectors/*/operations/*", + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func using param opts with wildcards", () => { + const expectedEndpoint = makeExpectedEndpoint( + dataconnect.mutationExecutedEventType, + {}, + { + service: "*", + connector: "*", + operation: "*", + } + ); + + const func = dataconnect.onMutationExecuted( + { + service: "*", + connector: "*", + operation: "*", + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("should create a func in the absence of param opts", () => { + const expectedEndpoint = makeExpectedEndpoint(dataconnect.mutationExecutedEventType, {}, {}); + + const func = dataconnect.onMutationExecuted({}, () => true); + expect(func.__endpoint).to.deep.eq(expectedEndpoint); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "google.firebase.dataconnect.connector.v1.mutationExecuted", + type: "type", + time: "time", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await dataconnect.onMutationExecuted( + "services/*/connectors/*/operations/*", + () => null + )(event); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v2/providers/eventarc.spec.ts b/spec/v2/providers/eventarc.spec.ts new file mode 100644 index 000000000..28696319a --- /dev/null +++ b/spec/v2/providers/eventarc.spec.ts @@ -0,0 +1,171 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies 
or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; +import * as options from "../../../src/v2/options"; +import * as eventarc from "../../../src/v2/providers/eventarc"; +import { FULL_OPTIONS } from "./fixtures"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../fixtures"; +import { CloudEvent, onInit } from "../../../src/v2/core"; + +const ENDPOINT_EVENT_TRIGGER = { + eventType: "event-type", + retry: false, + eventFilters: {}, +}; + +describe("v2/eventarc", () => { + describe("onCustomEventPublished", () => { + beforeEach(() => { + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + options.setGlobalOptions({}); + delete process.env.GCLOUD_PROJECT; + }); + + it("should create a minimal trigger/endpoint with default channel", () => { + const result = eventarc.onCustomEventPublished("event-type", () => 42); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_EVENT_TRIGGER, + channel: "locations/us-central1/channels/firebase", + }, + }); + }); + + it("should create a minimal trigger/endpoint with opts", () => { + const result = eventarc.onCustomEventPublished( + { eventType: "event-type", region: "us-west1" }, + () => 42 + ); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_EVENT_TRIGGER, + channel: "locations/us-central1/channels/firebase", + }, + region: ["us-west1"], + }); + }); + + it("should create a minimal trigger with channel with opts", () => { + const result = eventarc.onCustomEventPublished( + { + eventType: "event-type", + channel: "locations/us-west1/channels/my-channel", + filters: { foo: "bar" }, + }, + () => 42 + ); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_EVENT_TRIGGER, + channel: "locations/us-west1/channels/my-channel", + eventFilters: { + foo: "bar", + }, + }, + }); + }); + + it("should create a complex trigger/endpoint with appropriate values", () => { + const result = eventarc.onCustomEventPublished( + { + ...FULL_OPTIONS, + eventType: "event-type", + channel: "locations/us-west1/channels/my-channel", + }, + () => 42 + ); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: { + ...ENDPOINT_EVENT_TRIGGER, + channel: "locations/us-west1/channels/my-channel", + }, + }); + }); + + it("should merge options and globalOptions", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + }); + + const result = eventarc.onCustomEventPublished( + { + eventType: "event-type", + channel: "locations/us-west1/channels/my-channel", + region: "us-west1", + minInstances: 3, + }, + () => 42 + ); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + concurrency: 20, + minInstances: 3, + region: ["us-west1"], + labels: {}, + eventTrigger: { + 
...ENDPOINT_EVENT_TRIGGER, + channel: "locations/us-west1/channels/my-channel", + }, + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await eventarc.onCustomEventPublished("type", () => null)(event); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v2/providers/firestore.spec.ts b/spec/v2/providers/firestore.spec.ts new file mode 100644 index 000000000..e5406bdb3 --- /dev/null +++ b/spec/v2/providers/firestore.spec.ts @@ -0,0 +1,1286 @@ +// The MIT License (MIT) +// +// Copyright (c) 2023 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; +import { google } from "../../../protos/compiledFirestore"; +import { Timestamp } from "firebase-admin/firestore"; +import * as firestore from "../../../src/v2/providers/firestore"; +import { PathPattern } from "../../../src/common/utilities/path-pattern"; +import { onInit } from "../../../src/v2/core"; +import * as params from "../../../src/params"; + +/** static-complied protobuf */ +const DocumentEventData = google.events.cloud.firestore.v1.DocumentEventData; + +const eventBase = { + location: "us-central1", + project: "my-project", + database: "my-db", + namespace: "my-ns", + document: "foo/fGRodw71mHutZ4wGDuT8", + datacontenttype: "application/protobuf", + dataschema: + "https://github.com/googleapis/google-cloudevents/blob/main/proto/google/events/cloud/firestore/v1/data.proto", + id: "379ad868-5ef9-4c84-a8ba-f75f1b056663", + source: "projects/my-project/databases/my-db/documents/d", + subject: "documents/foo/fGRodw71mHutZ4wGDuT8", + specversion: "1.0" as const, + time: "2023-03-10T18:20:43.677647Z", + type: "google.cloud.firestore.document.v1.created", +}; + +const expectedEndpointBase = { + platform: "gcfv2", + availableMemoryMb: {}, + concurrency: {}, + ingressSettings: {}, + maxInstances: {}, + minInstances: {}, + serviceAccountEmail: {}, + timeoutSeconds: {}, + vpc: {}, + labels: {}, +}; + +function makeExpectedEp(eventType: string, eventFilters, eventFilterPathPatterns) { + return { + ...expectedEndpointBase, + eventTrigger: { + eventType, + eventFilters, + eventFilterPathPatterns, + retry: false, + }, + }; +} + +function makeEncodedProtobuf(data: any) { + return DocumentEventData.encode(data).finish(); +} + +function makeEvent(data?: any): firestore.RawFirestoreEvent { + return { + ...eventBase, + data, + } as firestore.RawFirestoreEvent; +} + +function makeAuthEvent(data?: any): firestore.RawFirestoreAuthEvent { + return { + ...eventBase, + data, + authid: "userId", + authtype: "unknown", + } as firestore.RawFirestoreAuthEvent; +} + +const createdData = { + value: { + fields: { + hello: { stringValue: "create world" }, + }, + createTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + updateTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + name: "projects/my-project/databases/my-db/documents/foo/fGRodw71mHutZ4wGDuT8", + }, +}; +const createdProto = DocumentEventData.create(createdData); + +const deletedData = { + oldValue: { + fields: { + hello: { stringValue: "delete world" }, + }, + createTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + updateTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + name: "projects/my-project/databases/my-db/documents/foo/fGRodw71mHutZ4wGDuT8", + }, +}; +const deletedProto = DocumentEventData.create(deletedData); + +const updatedData = { + value: { + fields: { + hello: { stringValue: "new world" }, + }, + createTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + updateTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + name: "projects/my-project/databases/my-db/documents/foo/fGRodw71mHutZ4wGDuT8", + }, + oldValue: { + fields: { + hello: { stringValue: "old world" }, + }, + createTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + updateTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + name: "projects/my-project/databases/my-db/documents/foo/fGRodw71mHutZ4wGDuT8", + }, + updateMask: { + fieldPaths: ["hello"], + }, +}; +const updatedProto = DocumentEventData.create(updatedData); + 
+const writtenData = { + value: { + fields: { + hello: { stringValue: "a new world" }, + }, + createTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + updateTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + name: "projects/my-project/databases/my-db/documents/foo/fGRodw71mHutZ4wGDuT8", + }, + oldValue: { + createTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + updateTime: Timestamp.fromDate(new Date("2023-03-10T00:58:40.349Z")), + name: "projects/my-project/databases/my-db/documents/foo/fGRodw71mHutZ4wGDuT8", + }, +}; +const writtenProto = DocumentEventData.create(writtenData); + +describe("firestore", () => { + let docParam: params.Expression; + let nsParam: params.Expression; + let dbParam: params.Expression; + + before(() => { + docParam = params.defineString("DOCUMENT"); + nsParam = params.defineString("NAMESPACE"); + dbParam = params.defineString("DATABASE"); + }); + + after(() => { + params.clearParams(); + }); + + describe("onDocumentWritten", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentWritten("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentWritten( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with param opts", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventType, + { + database: dbParam, + namespace: nsParam, + }, + { + document: docParam, + } + ); + + const func = firestore.onDocumentWritten( + { + database: dbParam, + namespace: nsParam, + document: docParam, + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentWritten("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentCreated", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentCreated("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentCreated( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as 
any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with param opts", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: dbParam, + namespace: nsParam, + }, + { + document: docParam, + } + ); + + const func = firestore.onDocumentCreated( + { + database: dbParam, + namespace: nsParam, + document: docParam, + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentCreated("type", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentUpdated", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentUpdated("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentUpdated( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with param opts", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventType, + { + database: dbParam, + namespace: nsParam, + }, + { + document: docParam, + } + ); + + const func = firestore.onDocumentUpdated( + { + database: dbParam, + namespace: nsParam, + document: docParam, + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentUpdated("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentDeleted", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentDeleted("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentDeleted( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with param 
opts", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventType, + { + database: dbParam, + namespace: nsParam, + }, + { + document: docParam, + } + ); + + const func = firestore.onDocumentDeleted( + { + database: dbParam, + namespace: nsParam, + document: docParam, + }, + () => true + ); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentDeleted("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentWrittenWithAuthContext", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventWithAuthContextType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentWrittenWithAuthContext("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventWithAuthContextType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentWrittenWithAuthContext( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentWrittenWithAuthContext("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentCreatedWithAuthContext", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventWithAuthContextType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentCreatedWithAuthContext("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventWithAuthContextType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentCreatedWithAuthContext( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentCreatedWithAuthContext("path", () => null)(event); + 
expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentUpdatedWithAuthContext", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventWithAuthContextType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentUpdatedWithAuthContext("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventWithAuthContextType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentUpdatedWithAuthContext( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentUpdatedWithAuthContext("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onDocumentDeletedWithAuthContext", () => { + it("should create a func", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventWithAuthContextType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onDocumentDeletedWithAuthContext("foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func with opts", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventWithAuthContextType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onDocumentDeletedWithAuthContext( + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("calls init function", async () => { + const event: firestore.RawFirestoreEvent = { + ...eventBase, + datacontenttype: "application/json", + data: { + oldValue: null, + value: null, + }, + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await firestore.onDocumentDeletedWithAuthContext("path", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("getOpts", () => { + it("should handle document string", () => { + const { document, database, namespace, opts } = firestore.getOpts("foo/{bar}"); + + expect(document).to.eq("foo/{bar}"); + expect(database).to.eq("(default)"); + expect(namespace).to.eq("(default)"); + expect(opts).to.deep.eq({}); + }); + + it("should parse and opts", () => { + const documentOpts = { + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + region: "us-central1", + }; + + const { document, database, namespace, opts } = firestore.getOpts(documentOpts); + + expect(document).to.eq("foo/{bar}"); + expect(database).to.eq("my-db"); + expect(namespace).to.eq("my-ns"); + 
expect(opts).to.deep.eq({ region: "us-central1" }); + }); + }); + + describe("createSnapshot", () => { + it("should throw an error on invalid content type", () => { + expect(() => + firestore.createSnapshot({ + ...eventBase, + datacontenttype: "something", + } as any) + ).to.throw("Error: Cannot parse event payload."); + }); + + it("should create snapshot of a protobuf encoded event if datacontenttype is missing", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(createdProto)); + delete rawEvent.datacontenttype; + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "create world" }); + }); + + it("should create snapshot of a protobuf encoded created event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(createdProto)); + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "create world" }); + }); + + it("should create snapshot of a protobuf encoded updated event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(updatedProto)); + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "new world" }); + }); + + it("should create snapshot of a protobuf encoded written event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(writtenProto)); + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "a new world" }); + }); + + it("should create snapshot of a json encoded created event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(createdData); + rawEvent.datacontenttype = "application/json"; + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "create world" }); + }); + + it("should create snapshot of a json encoded updated event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(updatedData); + rawEvent.datacontenttype = "application/json"; + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "new world" }); + }); + + it("should create snapshot of a json encoded written event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(writtenData); + rawEvent.datacontenttype = "application/json"; + + const snapshot = firestore.createSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "a new world" }); + }); + }); + + describe("createBeforeSnapshot", () => { + it("should throw an error on invalid content type", () => { + expect(() => + firestore.createBeforeSnapshot({ + ...eventBase, + datacontenttype: "something", + } as any) + ).to.throw("Error: Cannot parse event payload."); + }); + + it("should create before snapshot of a protobuf encoded deleted event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(deletedProto)); + + const snapshot = firestore.createBeforeSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "delete world" }); + }); + + it("should create before snapshot of a protobuf encoded updated event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(updatedProto)); + + const snapshot = firestore.createBeforeSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "old world" }); + }); + + it("should create before snapshot of a protobuf encoded written event", () => { + const
rawEvent: firestore.RawFirestoreEvent = makeEvent(makeEncodedProtobuf(writtenProto)); + + const snapshot = firestore.createBeforeSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({}); + }); + + it("should create before snapshot of a json encoded deleted event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(deletedData); + rawEvent.datacontenttype = "application/json"; + + const snapshot = firestore.createBeforeSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "delete world" }); + }); + + it("should create before snapshot of a json encoded updated event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(updatedData); + rawEvent.datacontenttype = "application/json"; + + const snapshot = firestore.createBeforeSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({ hello: "old world" }); + }); + + it("should create before snapshot of a json encoded written event", () => { + const rawEvent: firestore.RawFirestoreEvent = makeEvent(writtenData); + rawEvent.datacontenttype = "application/json"; + + const snapshot = firestore.createBeforeSnapshot(rawEvent); + + expect(snapshot.data()).to.deep.eq({}); + }); + }); + + describe("makeParams", () => { + it("should not extract matches without a path pattern", () => { + const params = firestore.makeParams( + "foo/fGRodw71mHutZ4wGDuT8", + new PathPattern("foo/fGRodw71mHutZ4wGDuT8") + ); + + expect(params).to.deep.eq({}); + }); + + it("should extract matches with a path pattern", () => { + const params = firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")); + + expect(params).to.deep.eq({ + bar: "fGRodw71mHutZ4wGDuT8", + }); + }); + }); + + describe("makeFirestoreEvent", () => { + it("should make event from an event without data", () => { + const event = firestore.makeFirestoreEvent( + firestore.createdEventType, + makeEvent(), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data).to.eq(undefined); + }); + + it("should make event from a created event", () => { + const event = firestore.makeFirestoreEvent( + firestore.createdEventType, + makeEvent(makeEncodedProtobuf(createdProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data.data()).to.deep.eq({ hello: "create world" }); + }); + + it("should make event from a deleted event", () => { + const event = firestore.makeFirestoreEvent( + firestore.deletedEventType, + makeEvent(makeEncodedProtobuf(deletedProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data.data()).to.deep.eq({ hello: "delete world" }); + }); + + it("should make event from a created event with auth context", () => { + const event = firestore.makeFirestoreEvent( + firestore.createdEventWithAuthContextType, + makeAuthEvent(makeEncodedProtobuf(createdProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data.data()).to.deep.eq({ hello: "create world" }); + }); + + it("should include auth fields if provided in raw event", () => { + const event = firestore.makeFirestoreEvent( + firestore.createdEventWithAuthContextType, + makeAuthEvent(makeEncodedProtobuf(createdProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event).to.include({ authId: "userId", authType: "unknown" }); + }); + }); + + describe("makeChangedFirestoreEvent", () => { + it("should make event from an event
without data", () => { + const event = firestore.makeChangedFirestoreEvent( + makeEvent(), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data).to.eq(undefined); + }); + + it("should make event from an updated event", () => { + const event = firestore.makeChangedFirestoreEvent( + makeEvent(makeEncodedProtobuf(updatedProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data.before.data()).to.deep.eq({ hello: "old world" }); + expect(event.data.after.data()).to.deep.eq({ hello: "new world" }); + }); + + it("should make event from a written event", () => { + const event = firestore.makeChangedFirestoreEvent( + makeEvent(makeEncodedProtobuf(writtenProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event.data.before.data()).to.deep.eq({}); + expect(event.data.after.data()).to.deep.eq({ hello: "a new world" }); + }); + }); + + it("should include auth fields if provided in raw event", () => { + const event = firestore.makeChangedFirestoreEvent( + makeAuthEvent(makeEncodedProtobuf(writtenProto)), + firestore.makeParams("foo/fGRodw71mHutZ4wGDuT8", new PathPattern("foo/{bar}")) + ); + + expect(event).to.include({ authId: "userId", authType: "unknown" }); + }); + + describe("makeEndpoint", () => { + it("should make an endpoint with a document path pattern", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const ep = firestore.makeEndpoint( + firestore.createdEventType, + { region: "us-central1" }, + "foo/{bar}", + "my-db", + "my-ns" + ); + + expect(ep).to.deep.eq(expectedEp); + }); + + it("should make an endpoint with a document filter", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: "my-db", + namespace: "my-ns", + document: "foo/fGRodw71mHutZ4wGDuT8", + }, + {} + ); + expectedEp["region"] = ["us-central1"]; + + const ep = firestore.makeEndpoint( + firestore.createdEventType, + { region: "us-central1" }, + "foo/fGRodw71mHutZ4wGDuT8", + "my-db", + "my-ns" + ); + + expect(ep).to.deep.eq(expectedEp); + }); + }); + + describe("onOperation", () => { + it("should create a func on a created operation", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onOperation(firestore.createdEventType, "foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func on a created operation with opts", () => { + const expectedEp = makeExpectedEp( + firestore.createdEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onOperation( + firestore.createdEventType, + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func on a deleted operation", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const 
func = firestore.onOperation(firestore.deletedEventType, "foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func on a deleted operation with opts", () => { + const expectedEp = makeExpectedEp( + firestore.deletedEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onOperation( + firestore.deletedEventType, + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + }); + + describe("onChangedOperation", () => { + it("should create a func on an updated operation", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onChangedOperation(firestore.updatedEventType, "foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func on an updated operation with opts", () => { + const expectedEp = makeExpectedEp( + firestore.updatedEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onChangedOperation( + firestore.updatedEventType, + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func on a written operation", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventType, + { + database: "(default)", + namespace: "(default)", + }, + { + document: "foo/{bar}", + } + ); + + const func = firestore.onChangedOperation(firestore.writtenEventType, "foo/{bar}", () => 2); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + + it("should create a func on a written operation with opts", () => { + const expectedEp = makeExpectedEp( + firestore.writtenEventType, + { + database: "my-db", + namespace: "my-ns", + }, + { + document: "foo/{bar}", + } + ); + expectedEp["region"] = ["us-central1"]; + + const func = firestore.onChangedOperation( + firestore.writtenEventType, + { + region: "us-central1", + document: "foo/{bar}", + database: "my-db", + namespace: "my-ns", + }, + () => 2 + ); + + expect(func.run(true as any)).to.eq(2); + expect(func.__endpoint).to.deep.eq(expectedEp); + }); + }); +}); diff --git a/spec/v2/providers/fixtures.ts b/spec/v2/providers/fixtures.ts new file mode 100644 index 000000000..1766a3dfb --- /dev/null +++ b/spec/v2/providers/fixtures.ts @@ -0,0 +1,62 @@ +import { ManifestEndpoint } from "../../../src/runtime/manifest"; +import { TriggerAnnotation } from "../../../src/v2/core"; +import * as options from "../../../src/v2/options"; + +export { MINIMAL_V1_ENDPOINT, MINIMAL_V2_ENDPOINT } from "../../fixtures"; + +export const FULL_OPTIONS: options.GlobalOptions = { + region: "us-west1", + memory: "512MiB", + timeoutSeconds: 60, + minInstances: 1, + maxInstances: 3, + concurrency: 20, + vpcConnector: "aConnector", + vpcConnectorEgressSettings: "ALL_TRAFFIC", + serviceAccount: "root@", + ingressSettings: "ALLOW_ALL", + cpu: "gcf_gen1", +
labels: { + hello: "world", + }, + secrets: ["MY_SECRET"], +}; + +export const FULL_TRIGGER: TriggerAnnotation = { + platform: "gcfv2", + regions: ["us-west1"], + availableMemoryMb: 512, + timeout: "60s", + minInstances: 1, + maxInstances: 3, + concurrency: 20, + vpcConnector: "aConnector", + vpcConnectorEgressSettings: "ALL_TRAFFIC", + serviceAccountEmail: "root@aProject.iam.gserviceaccount.com", + ingressSettings: "ALLOW_ALL", + labels: { + hello: "world", + }, + secrets: ["MY_SECRET"], +}; + +export const FULL_ENDPOINT: ManifestEndpoint = { + platform: "gcfv2", + region: ["us-west1"], + availableMemoryMb: 512, + timeoutSeconds: 60, + minInstances: 1, + maxInstances: 3, + concurrency: 20, + vpc: { + connector: "aConnector", + egressSettings: "ALL_TRAFFIC", + }, + serviceAccountEmail: "root@", + ingressSettings: "ALLOW_ALL", + cpu: "gcf_gen1", + labels: { + hello: "world", + }, + secretEnvironmentVariables: [{ key: "MY_SECRET" }], +}; diff --git a/spec/v2/providers/https.spec.ts b/spec/v2/providers/https.spec.ts new file mode 100644 index 000000000..9ab5d52a8 --- /dev/null +++ b/spec/v2/providers/https.spec.ts @@ -0,0 +1,693 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; +import * as sinon from "sinon"; + +import * as debug from "../../../src/common/debug"; +import * as options from "../../../src/v2/options"; +import * as https from "../../../src/v2/providers/https"; +import { expectedResponseHeaders, MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT, FULL_OPTIONS, FULL_TRIGGER } from "./fixtures"; +import { onInit } from "../../../src/v2/core"; +import { Handler } from "express"; +import { genkit } from "genkit"; +import { clearParams, defineList, Expression } from "../../../src/params"; + +function request(args: { + data?: any; + auth?: Record<string, string>; + headers?: Record<string, string>; + method?: MockRequest["method"]; +}): any { + let headers: Record<string, string> = {}; + if (args.method !== "POST") { + headers["content-type"] = "application/json"; + } + headers = { + ...headers, + ...args.headers, + }; + if (args.auth) { + headers["authorization"] = `bearer ignored.${Buffer.from( + JSON.stringify(args.auth), + "utf-8" + ).toString("base64")}.ignored`; + } + const ret = new MockRequest({ data: args.data || {} }, headers); + ret.method = args.method || "POST"; + return ret; +} + +describe("onRequest", () => { + beforeEach(() => { + options.setGlobalOptions({}); + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + options.setGlobalOptions({}); + delete process.env.GCLOUD_PROJECT; + }); + + it("should return a minimal trigger/endpoint with appropriate values", () => { + const result = https.onRequest((req, res) => { + res.send(200); + }); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + httpsTrigger: { + allowInsecure: false, + }, + labels: {}, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + httpsTrigger: {}, + labels: {}, + }); + }); + + it("should create a complex trigger/endpoint with appropriate values", () => { + const result = https.onRequest( + { + ...FULL_OPTIONS, + region: ["us-west1", "us-central1"], + invoker: ["service-account1@", "service-account2@"], + }, + (req, res) => { + res.send(200); + } + ); + + expect(result.__trigger).to.deep.equal({ + ...FULL_TRIGGER, + httpsTrigger: { + allowInsecure: false, + invoker: ["service-account1@", "service-account2@"], + }, + regions: ["us-west1", "us-central1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + httpsTrigger: { + invoker: ["service-account1@", "service-account2@"], + }, + region: ["us-west1", "us-central1"], + }); + }); + + it("should merge options and globalOptions", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + invoker: "public", + }); + + const result = https.onRequest( + { + region: ["us-west1", "us-central1"], + minInstances: 3, + invoker: "private", + }, + (req, res) => { + res.send(200); + } + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + httpsTrigger: { + allowInsecure: false, + invoker: ["private"], + }, + concurrency: 20, + minInstances: 3, + regions: ["us-west1", "us-central1"], + labels: {}, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + httpsTrigger: { + invoker: ["private"], + }, + concurrency: 20, + minInstances: 3, + region: ["us-west1", "us-central1"], + labels: {}, + }); + }); + + it("should take globalOptions invoker", () => { + options.setGlobalOptions({ + invoker: "private", + }); + + const result =
https.onRequest((req, res) => { + res.send(); + }); + + expect(result.__trigger).to.deep.eq({ + platform: "gcfv2", + httpsTrigger: { + allowInsecure: false, + invoker: ["private"], + }, + labels: {}, + }); + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + httpsTrigger: { + invoker: ["private"], + }, + labels: {}, + }); + }); + + it("should be an express handler", async () => { + const func = https.onRequest((req, res) => { + res.send("Works"); + }); + + const req = request({ headers: { origin: "example.com" } }); + const resp = await runHandler(func, req); + expect(resp.body).to.equal("Works"); + }); + + it("should enforce CORS options", async () => { + const func = https.onRequest({ cors: "example.com" }, () => { + throw new Error("Should not reach here for OPTIONS preflight"); + }); + + const req = request({ + headers: { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + origin: "example.com", + }, + method: "OPTIONS", + }); + + const resp = await runHandler(func, req); + expect(resp.status).to.equal(204); + expect(resp.body).to.be.undefined; + expect(resp.headers).to.deep.equal({ + "Access-Control-Allow-Methods": "GET,HEAD,PUT,PATCH,POST,DELETE", + "Access-Control-Allow-Origin": "example.com", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + }); + + it("should allow cors params", async () => { + const origins = defineList("ORIGINS"); + + try { + process.env.ORIGINS = '["example.com","example2.com"]'; + const func = https.onRequest( + { + cors: origins, + }, + (req, res) => { + res.send("42"); + } + ); + const req = request({ + headers: { + referrer: "example.com", + "content-type": "application/json", + origin: "example.com", + }, + method: "OPTIONS", + }); + + const response = await runHandler(func, req); + + expect(response.status).to.equal(204); + expect(response.headers).to.deep.equal({ + "Access-Control-Allow-Origin": "example.com", + "Access-Control-Allow-Methods": "GET,HEAD,PUT,PATCH,POST,DELETE", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + } finally { + delete process.env.ORIGINS; + clearParams(); + } + }); + + it("should add CORS headers if debug feature is enabled", async () => { + sinon.stub(debug, "isDebugFeatureEnabled").withArgs("enableCors").returns(true); + + const func = https.onRequest(() => { + throw new Error("Should not reach here for OPTIONS preflight"); + }); + + const req = request({ + headers: { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + origin: "localhost", + }, + method: "OPTIONS", + }); + + const resp = await runHandler(func, req); + expect(resp.status).to.equal(204); + expect(resp.body).to.be.undefined; + expect(resp.headers).to.deep.equal({ + "Access-Control-Allow-Methods": "GET,HEAD,PUT,PATCH,POST,DELETE", + "Access-Control-Allow-Origin": "localhost", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + + sinon.restore(); + }); + + it("should NOT add CORS headers if debug feature is enabled and cors has value false", async () => { + sinon.stub(debug, "isDebugFeatureEnabled").withArgs("enableCors").returns(true); + + const func = https.onRequest({ cors: false }, (req, res) => { + res.status(200).send("Good"); + }); + + const req = request({ + headers: { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + origin: "example.com", + }, + method: "OPTIONS", + }); + + const resp = await 
runHandler(func, req); + expect(resp.status).to.equal(200); + expect(resp.body).to.be.equal("Good"); + expect(resp.headers).to.deep.equal({}); + + sinon.restore(); + }); + + it("calls init function", async () => { + const func = https.onRequest((req, res) => { + res.status(200).send("Good"); + }); + const req = request({ + headers: { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + origin: "example.com", + }, + method: "OPTIONS", + }); + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req); + expect(hello).to.equal("world"); + }); +}); + +describe("onCall", () => { + let origins: Expression<string[]>; + beforeEach(() => { + origins = defineList("ORIGINS"); + process.env.ORIGINS = '["example.com","example2.com"]'; + + options.setGlobalOptions({}); + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + delete process.env.GCLOUD_PROJECT; + delete process.env.ORIGINS; + clearParams(); + }); + + it("should return a minimal trigger/endpoint with appropriate values", () => { + const result = https.onCall(() => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + httpsTrigger: { + allowInsecure: false, + }, + labels: { + "deployment-callable": "true", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + callableTrigger: {}, + labels: {}, + }); + }); + + it("should create a complex trigger/endpoint with appropriate values", () => { + const result = https.onCall(FULL_OPTIONS, () => 42); + + expect(result.__trigger).to.deep.equal({ + ...FULL_TRIGGER, + httpsTrigger: { + allowInsecure: false, + }, + labels: { + ...FULL_TRIGGER.labels, + "deployment-callable": "true", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + callableTrigger: {}, + }); + }); + + it("should merge options and globalOptions", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + }); + + const result = https.onCall( + { + region: ["us-west1", "us-central1"], + minInstances: 3, + }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + httpsTrigger: { + allowInsecure: false, + }, + concurrency: 20, + minInstances: 3, + regions: ["us-west1", "us-central1"], + labels: { + "deployment-callable": "true", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + callableTrigger: {}, + concurrency: 20, + minInstances: 3, + region: ["us-west1", "us-central1"], + labels: {}, + }); + }); + + it("has a .run method", async () => { + const cf = https.onCall((request) => { + return request; + }); + + const request: any = { + data: "data", + instanceIdToken: "token", + auth: { + uid: "abc", + token: "token", + }, + }; + await expect(cf.run(request)).to.eventually.deep.equal(request); + }); + + it("should be an express handler", async () => { + const func = https.onCall(() => 42); + + const req = request({ headers: { origin: "example.com" } }); + + const resp = await runHandler(func, req); + expect(resp.body).to.deep.equal(JSON.stringify({ result: 42 })); + }); + + it("should enforce CORS options", async () => { + const func = https.onCall({ cors: "example.com" }, () => { + throw new Error("Should not reach here for OPTIONS preflight"); + }); + + const req = request({ + headers: { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + origin: "example.com", + },
+ method: "OPTIONS", + }); + + const resp = await runHandler(func, req); + expect(resp.status).to.equal(204); + expect(resp.body).to.be.undefined; + expect(resp.headers).to.deep.equal({ + "Access-Control-Allow-Methods": "POST", + "Access-Control-Allow-Origin": "example.com", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + }); + + it("should allow cors params", async () => { + const func = https.onCall({ cors: origins }, () => 42); + const req = request({ + headers: { + referrer: "example.com", + "content-type": "application/json", + origin: "example.com", + }, + method: "OPTIONS", + }); + + const response = await runHandler(func, req); + + expect(response.status).to.equal(204); + expect(response.headers).to.deep.equal({ + "Access-Control-Allow-Origin": "example.com", + "Access-Control-Allow-Methods": "POST", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + }); + + it("overrides CORS headers if debug feature is enabled", async () => { + sinon.stub(debug, "isDebugFeatureEnabled").withArgs("enableCors").returns(true); + + const func = https.onCall({ cors: "example.com" }, () => { + throw new Error("Should not reach here for OPTIONS preflight"); + }); + const req = request({ + headers: { + "Access-Control-Request-Method": "POST", + "Access-Control-Request-Headers": "origin", + origin: "localhost", + }, + method: "OPTIONS", + }); + + const response = await runHandler(func, req); + + expect(response.status).to.equal(204); + expect(response.body).to.be.undefined; + expect(response.headers).to.deep.equal({ + "Access-Control-Allow-Methods": "POST", + "Access-Control-Allow-Origin": "localhost", + "Content-Length": "0", + Vary: "Origin, Access-Control-Request-Headers", + }); + + sinon.restore(); + }); + + it("adds CORS headers", async () => { + const func = https.onCall(() => 42); + const req = request({ headers: { origin: "example.com" } }); + const response = await runHandler(func, req); + + expect(response.status).to.equal(200); + expect(response.body).to.be.deep.equal(JSON.stringify({ result: 42 })); + expect(response.headers).to.deep.equal(expectedResponseHeaders); + }); + + // These tests pass if the code transpiles + it("allows desirable syntax", () => { + https.onCall( + (request: https.CallableRequest) => `hello, ${request.data}!` + ); + https.onCall((request: https.CallableRequest) => `hello, ${request.data}!`); + https.onCall((request: https.CallableRequest) => `hello, ${request.data}!`); + https.onCall((request: https.CallableRequest) => `Hello, ${request.data}`); + https.onCall((request: https.CallableRequest) => `Hello, ${request.data}`); + }); + + it("calls init function", async () => { + const func = https.onCall(() => 42); + + const req = request({ headers: { origin: "example.com" } }); + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req); + expect(hello).to.equal("world"); + }); + + describe("authPolicy", () => { + before(() => { + sinon.stub(debug, "isDebugFeatureEnabled").withArgs("skipTokenVerification").returns(true); + }); + + after(() => { + sinon.restore(); + }); + + it("should check isSignedIn", async () => { + const func = https.onCall( + { + authPolicy: https.isSignedIn(), + }, + () => 42 + ); + + const authResp = await runHandler(func, request({ auth: { sub: "inlined" } })); + expect(authResp.status).to.equal(200); + + const anonResp = await runHandler(func, request({})); + expect(anonResp.status).to.equal(403); + }); + + it("should check 
hasClaim", async () => { + const anyValue = https.onCall( + { + authPolicy: https.hasClaim("meaning"), + }, + () => "HHGTTG" + ); + const specificValue = https.onCall( + { + authPolicy: https.hasClaim("meaning", "42"), + }, + () => "HHGTG" + ); + + const cases: Array<{ fn: Handler; auth?: Record; status: number }> = [ + { fn: anyValue, auth: { meaning: "42" }, status: 200 }, + { fn: anyValue, auth: { meaning: "43" }, status: 200 }, + { fn: anyValue, auth: { order: "66" }, status: 403 }, + { fn: anyValue, status: 403 }, + { fn: specificValue, auth: { meaning: "42" }, status: 200 }, + { fn: specificValue, auth: { meaning: "43" }, status: 403 }, + { fn: specificValue, auth: { order: "66" }, status: 403 }, + { fn: specificValue, status: 403 }, + ]; + for (const test of cases) { + const resp = await runHandler(test.fn, request({ auth: test.auth })); + expect(resp.status).to.equal(test.status); + } + }); + + it("can be any callback", async () => { + const divTwo = https.onCall( + { + authPolicy: (auth, data) => data % 2 === 0, + }, + (req) => req.data / 2 + ); + + const authorized = await runHandler(divTwo, request({ data: 2 })); + expect(authorized.status).to.equal(200); + const accessDenied = await runHandler(divTwo, request({ data: 1 })); + expect(accessDenied.status).to.equal(403); + }); + }); +}); + +describe("onCallGenkit", () => { + it("calls with JSON requests", async () => { + const flow = { + __action: { + name: "test", + }, + run: sinon.stub(), + stream: sinon.stub(), + }; + flow.run.withArgs("answer").returns({ result: 42 }); + flow.stream.throws("Unexpected stream"); + + const f = https.onCallGenkit(flow); + + const req = request({ data: "answer" }); + const res = await runHandler(f, req); + expect(JSON.parse(res.body)).to.deep.equal({ result: 42 }); + }); + + it("Streams with SSE requests", async () => { + const flow = { + __action: { + name: "test", + }, + run: sinon.stub(), + stream: sinon.stub(), + }; + flow.run.onFirstCall().throws(); + flow.stream.withArgs("answer").returns({ + stream: (async function* () { + await Promise.resolve(); + yield 1; + await Promise.resolve(); + yield 2; + })(), + output: Promise.resolve(42), + }); + + const f = https.onCallGenkit(flow); + + const req = request({ data: "answer", headers: { accept: "text/event-stream" } }); + const res = await runHandler(f, req); + expect(res.body).to.equal( + ['data: {"message":1}', 'data: {"message":2}', 'data: {"result":42}', ""].join("\n\n") + ); + }); + + it("Exports types that are compatible with the genkit library (compilation is success)", () => { + const ai = genkit({}); + const flow = ai.defineFlow("test", () => 42); + https.onCallGenkit(flow); + }); +}); diff --git a/spec/v2/providers/httpsAsync.spec.ts b/spec/v2/providers/httpsAsync.spec.ts new file mode 100644 index 000000000..78ff12bf1 --- /dev/null +++ b/spec/v2/providers/httpsAsync.spec.ts @@ -0,0 +1,49 @@ +import { expect } from "chai"; +import * as sinon from "sinon"; +import * as https from "../../../src/v2/providers/https"; +import * as logger from "../../../src/logger"; +import { MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; + +describe("v2.https.onRequest async", () => { + let loggerSpy: sinon.SinonSpy; + + beforeEach(() => { + loggerSpy = sinon.spy(logger, "error"); + }); + + afterEach(() => { + loggerSpy.restore(); + }); + + it("should catch and log unhandled rejections in async onRequest handlers", async () => { + const err = new Error("boom"); + const fn = https.onRequest(async (_req, _res) => { 
+ await Promise.resolve(); + throw err; + }); + + const req = new MockRequest({}, {}); + req.method = "GET"; + + const result = await runHandler(fn, req as any); + + expect(loggerSpy.calledWith("Unhandled error", err)).to.be.true; + expect(result.status).to.equal(500); + expect(result.body).to.equal("Internal Server Error"); + }); + + it("should not log if handler completes successfully", async () => { + const fn = https.onRequest(async (_req, res) => { + await Promise.resolve(); + res.send(200); + }); + + const req = new MockRequest({}, {}); + req.method = "GET"; + + await runHandler(fn, req as any); + + expect(loggerSpy.called).to.be.false; + }); +}); diff --git a/spec/v2/providers/identity.spec.ts b/spec/v2/providers/identity.spec.ts new file mode 100644 index 000000000..dbda1189c --- /dev/null +++ b/spec/v2/providers/identity.spec.ts @@ -0,0 +1,470 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+import { expect } from "chai"; +import * as identity from "../../../src/v2/providers/identity"; +import { MINIMAL_V2_ENDPOINT } from "../../fixtures"; +import { onInit } from "../../../src/v2/core"; +import { MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; + +const IDENTITY_TOOLKIT_API = "identitytoolkit.googleapis.com"; +const REGION = "us-west1"; + +const BEFORE_CREATE_TRIGGER = { + eventType: "providers/cloud.auth/eventTypes/user.beforeCreate", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, +}; + +const BEFORE_SIGN_IN_TRIGGER = { + eventType: "providers/cloud.auth/eventTypes/user.beforeSignIn", + options: { + accessToken: false, + idToken: false, + refreshToken: false, + }, +}; + +const BEFORE_EMAIL_TRIGGER = { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + options: {}, +}; + +const BEFORE_SMS_TRIGGER = { + eventType: "providers/cloud.auth/eventTypes/user.beforeSendSms", + options: {}, +}; + +const opts: identity.BlockingOptions = { + accessToken: true, + refreshToken: false, + minInstances: 1, + region: REGION, +}; + +describe("identity", () => { + describe("beforeUserCreated", () => { + it("should accept a handler", () => { + const fn = identity.beforeUserCreated(() => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_CREATE_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should accept options and a handler", () => { + const fn = identity.beforeUserCreated(opts, () => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_CREATE_TRIGGER, + options: { + ...BEFORE_CREATE_TRIGGER.options, + accessToken: true, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("calls init function", async () => { + const func = identity.beforeUserCreated(() => null); + + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + origin: "example.com", + } + ); + req.method = "POST"; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req as any); + expect(hello).to.equal("world"); + }); + }); + + describe("beforeUserSignedIn", () => { + it("should accept a handler", () => { + const fn = identity.beforeUserSignedIn(() => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_SIGN_IN_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should accept options and a handler", () => { + const fn = identity.beforeUserSignedIn(opts, () => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_SIGN_IN_TRIGGER, + options: { + ...BEFORE_SIGN_IN_TRIGGER.options, + accessToken: true, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, 
+ ]); + }); + + it("calls init function", async () => { + const func = identity.beforeUserSignedIn(() => null); + + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + origin: "example.com", + } + ); + req.method = "POST"; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req as any); + expect(hello).to.equal("world"); + }); + }); + + describe("beforeEmailSent", () => { + it("should accept a handler", () => { + const fn = identity.beforeEmailSent(() => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_EMAIL_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should accept options and a handler", () => { + const fn = identity.beforeEmailSent( + { region: opts.region, minInstances: opts.minInstances }, + () => Promise.resolve() + ); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_EMAIL_TRIGGER, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("beforeSmsSent", () => { + it("should accept a handler", () => { + const fn = identity.beforeSmsSent(() => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_SMS_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should accept options and a handler", () => { + const fn = identity.beforeSmsSent( + { region: opts.region, minInstances: opts.minInstances }, + () => Promise.resolve() + ); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_SMS_TRIGGER, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("beforeOperation", () => { + it("should handle eventType and handler for before create events", () => { + const fn = identity.beforeOperation("beforeCreate", () => Promise.resolve(), undefined); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_CREATE_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should handle eventType and handler for before sign in events", () => { + const fn = identity.beforeOperation("beforeSignIn", () => Promise.resolve(), undefined); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_SIGN_IN_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should handle eventType and handler for before email events", () => { + const fn = identity.beforeOperation("beforeSendEmail", () => Promise.resolve(), undefined); + + expect(fn.__endpoint).to.deep.equal({ + 
...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_EMAIL_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should handle eventType and handler for before sms events", () => { + const fn = identity.beforeOperation("beforeSendSms", () => Promise.resolve(), undefined); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + blockingTrigger: BEFORE_SMS_TRIGGER, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + it("should handle eventType, options, and handler for before create events", () => { + const fn = identity.beforeOperation("beforeCreate", opts, () => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_CREATE_TRIGGER, + options: { + ...BEFORE_CREATE_TRIGGER.options, + accessToken: true, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should handle eventType, options, and handler for before sign in events", () => { + const fn = identity.beforeOperation("beforeSignIn", opts, () => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_SIGN_IN_TRIGGER, + options: { + ...BEFORE_SIGN_IN_TRIGGER.options, + accessToken: true, + }, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + + it("should handle eventType, options, and handler for before send email events", () => { + const fn = identity.beforeOperation("beforeSendEmail", opts, () => Promise.resolve()); + + expect(fn.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + minInstances: 1, + region: [REGION], + blockingTrigger: { + ...BEFORE_EMAIL_TRIGGER, + }, + }); + expect(fn.__requiredAPIs).to.deep.equal([ + { + api: IDENTITY_TOOLKIT_API, + reason: "Needed for auth blocking functions", + }, + ]); + }); + }); + + describe("getOpts", () => { + it("should parse an empty object", () => { + const internalOpts = identity.getOpts({}); + + expect(internalOpts).to.deep.equal({ + opts: {}, + accessToken: false, + idToken: false, + refreshToken: false, + }); + }); + + it("should parse global options", () => { + const internalOpts = identity.getOpts({ region: "us-central1", cpu: 2 }); + + expect(internalOpts).to.deep.equal({ + opts: { + region: "us-central1", + cpu: 2, + }, + accessToken: false, + idToken: false, + refreshToken: false, + }); + }); + + it("should parse full options", () => { + const internalOpts = identity.getOpts({ + region: "us-central1", + cpu: 2, + accessToken: true, + idToken: false, + refreshToken: true, + }); + + expect(internalOpts).to.deep.equal({ + opts: { + region: "us-central1", + cpu: 2, + }, + accessToken: true, + idToken: false, + refreshToken: true, + }); + }); + }); +}); diff --git a/spec/v2/providers/pubsub.spec.ts b/spec/v2/providers/pubsub.spec.ts new file mode 100644 index 000000000..d498b1b42 --- /dev/null +++ b/spec/v2/providers/pubsub.spec.ts @@ -0,0 +1,196 @@ +import { expect } from
"chai"; + +import { CloudEvent } from "../../../src/v2/core"; +import * as options from "../../../src/v2/options"; +import * as pubsub from "../../../src/v2/providers/pubsub"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT, FULL_OPTIONS, FULL_TRIGGER } from "./fixtures"; + +const EVENT_TRIGGER = { + eventType: "google.cloud.pubsub.topic.v1.messagePublished", + resource: "projects/aProject/topics/topic", +}; + +const ENDPOINT_EVENT_TRIGGER = { + eventType: "google.cloud.pubsub.topic.v1.messagePublished", + eventFilters: { + topic: "topic", + }, + retry: false, +}; + +describe("onMessagePublished", () => { + beforeEach(() => { + options.setGlobalOptions({}); + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should return a minimal trigger/endpoint with appropriate values", () => { + const result = pubsub.onMessagePublished("topic", () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + eventTrigger: EVENT_TRIGGER, + labels: {}, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + eventTrigger: ENDPOINT_EVENT_TRIGGER, + labels: {}, + }); + }); + + it("should create a complex trigger/endpoint with appropriate values", () => { + const result = pubsub.onMessagePublished({ ...FULL_OPTIONS, topic: "topic" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + ...FULL_TRIGGER, + eventTrigger: EVENT_TRIGGER, + }); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: ENDPOINT_EVENT_TRIGGER, + }); + }); + + it("should merge options and globalOptions", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + }); + + const result = pubsub.onMessagePublished( + { + topic: "topic", + region: "us-west1", + minInstances: 3, + }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + concurrency: 20, + minInstances: 3, + regions: ["us-west1"], + labels: {}, + eventTrigger: EVENT_TRIGGER, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + concurrency: 20, + minInstances: 3, + region: ["us-west1"], + labels: {}, + eventTrigger: ENDPOINT_EVENT_TRIGGER, + }); + }); + + it("should convert retry option if appropriate", () => { + const result = pubsub.onMessagePublished( + { + topic: "topic", + region: "us-west1", + minInstances: 3, + retry: true, + }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + minInstances: 3, + regions: ["us-west1"], + labels: {}, + eventTrigger: EVENT_TRIGGER, + failurePolicy: { retry: true }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + minInstances: 3, + region: ["us-west1"], + labels: {}, + eventTrigger: { ...ENDPOINT_EVENT_TRIGGER, retry: true }, + }); + }); + + it("should have a .run method", () => { + const func = pubsub.onMessagePublished("topic", (event) => event); + + const res = func.run("input" as any); + + expect(res).to.equal("input"); + }); + + it("should parse pubsub messages", async () => { + let json: unknown; + const messageJSON = { + messageId: "uuid", + data: Buffer.from(JSON.stringify({ hello: "world" })).toString("base64"), + attributes: { key: "value" }, + orderingKey: "orderingKey", + publishTime: new Date(Date.now()).toISOString(), + }; + const publishData: pubsub.MessagePublishedData = { + message: messageJSON as any, + subscription: 
"projects/aProject/subscriptions/aSubscription", + }; + const event: CloudEvent> = { + specversion: "1.0", + source: "//pubsub.googleapis.com/projects/aProject/topics/topic", + id: "uuid", + type: EVENT_TRIGGER.eventType, + time: messageJSON.publishTime, + data: publishData, + }; + + const func = pubsub.onMessagePublished("topic", (event) => { + json = event.data.message.json; + return event; + }); + + const eventAgain = await func(event); + + // Deep equal uses JSON equality, so we'll still match even though + // Message is a class and we passed an interface. + expect(eventAgain).to.deep.equal(event); + + expect(json).to.deep.equal({ hello: "world" }); + }); + + // These tests pass if the transpiler works + it("allows desirable syntax", () => { + pubsub.onMessagePublished( + "topic", + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (event: CloudEvent>) => undefined + ); + pubsub.onMessagePublished( + "topic", + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (event: CloudEvent) => undefined + ); + pubsub.onMessagePublished( + "topic", + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (event: CloudEvent>) => undefined + ); + pubsub.onMessagePublished( + "topic", + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (event: CloudEvent) => undefined + ); + }); +}); diff --git a/spec/v2/providers/remoteConfig.spec.ts b/spec/v2/providers/remoteConfig.spec.ts new file mode 100644 index 000000000..3b32ed111 --- /dev/null +++ b/spec/v2/providers/remoteConfig.spec.ts @@ -0,0 +1,95 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; +import * as remoteConfig from "../../../src/v2/providers/remoteConfig"; +import * as options from "../../../src/v2/options"; +import { MINIMAL_V2_ENDPOINT } from "../../fixtures"; +import { CloudEvent, onInit } from "../../../src/v2/core"; + +describe("onConfigUpdated", () => { + afterEach(() => { + options.setGlobalOptions({}); + }); + + it("should create a function with a handler", async () => { + const fn = remoteConfig.onConfigUpdated(() => 2); + + expect(fn.__endpoint).to.deep.eq({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: remoteConfig.eventType, + eventFilters: {}, + retry: false, + }, + }); + await expect(fn(1 as any)).to.eventually.eq(2); + }); + + it("should create a function with opts and a handler", async () => { + options.setGlobalOptions({ + memory: "512MiB", + region: "us-west1", + }); + + const fn = remoteConfig.onConfigUpdated( + { + region: "us-central1", + retry: true, + }, + () => 2 + ); + + expect(fn.__endpoint).to.deep.eq({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + availableMemoryMb: 512, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: remoteConfig.eventType, + eventFilters: {}, + retry: true, + }, + }); + await expect(fn(1 as any)).to.eventually.eq(2); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await remoteConfig.onConfigUpdated(() => null)(event); + expect(hello).to.equal("world"); + }); +}); diff --git a/spec/v2/providers/scheduler.spec.ts b/spec/v2/providers/scheduler.spec.ts new file mode 100644 index 000000000..fcd03cf1f --- /dev/null +++ b/spec/v2/providers/scheduler.spec.ts @@ -0,0 +1,215 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { expect } from "chai"; +import { ManifestEndpoint } from "../../../src/runtime/manifest"; +import * as options from "../../../src/v2/options"; +import * as schedule from "../../../src/v2/providers/scheduler"; +import { MINIMAL_V2_ENDPOINT } from "../../fixtures"; +import { onInit } from "../../../src/v2/core"; +import { MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; + +const MINIMAL_SCHEDULE_TRIGGER: ManifestEndpoint["scheduleTrigger"] = { + schedule: "", + timeZone: options.RESET_VALUE, + retryConfig: { + retryCount: options.RESET_VALUE, + maxRetrySeconds: options.RESET_VALUE, + minBackoffSeconds: options.RESET_VALUE, + maxBackoffSeconds: options.RESET_VALUE, + maxDoublings: options.RESET_VALUE, + }, +}; + +describe("schedule", () => { + describe("getOpts", () => { + it("should handle a schedule", () => { + expect(schedule.getOpts("* * * * *")).to.deep.eq({ + schedule: "* * * * *", + opts: {}, + }); + }); + + it("should handle full options", () => { + const options: schedule.ScheduleOptions = { + schedule: "* * * * *", + timeZone: "utc", + retryCount: 3, + maxRetrySeconds: 1, + minBackoffSeconds: 2, + maxBackoffSeconds: 3, + maxDoublings: 4, + memory: "128MiB", + region: "us-central1", + }; + + expect(schedule.getOpts(options)).to.deep.eq({ + schedule: "* * * * *", + timeZone: "utc", + retryConfig: { + retryCount: 3, + maxRetrySeconds: 1, + minBackoffSeconds: 2, + maxBackoffSeconds: 3, + maxDoublings: 4, + }, + opts: { + ...options, + memory: "128MiB", + region: "us-central1", + }, + }); + }); + }); + + describe("onSchedule", () => { + it("should create a schedule function given a schedule", () => { + const schfn = schedule.onSchedule("* * * * *", () => console.log(1)); + + expect(schfn.__endpoint).to.deep.eq({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + scheduleTrigger: { + ...MINIMAL_SCHEDULE_TRIGGER, + schedule: "* * * * *", + }, + }); + expect(schfn.__requiredAPIs).to.deep.eq([ + { + api: "cloudscheduler.googleapis.com", + reason: "Needed for scheduled functions.", + }, + ]); + }); + + it("should create a schedule function given options", () => { + const schfn = schedule.onSchedule( + { + schedule: "* * * * *", + timeZone: "utc", + retryCount: 3, + maxRetrySeconds: 10, + minBackoffSeconds: 11, + maxBackoffSeconds: 12, + maxDoublings: 2, + region: "us-central1", + labels: { key: "val" }, + }, + () => undefined + ); + + expect(schfn.__endpoint).to.deep.eq({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: { key: "val" }, + region: ["us-central1"], + scheduleTrigger: { + schedule: "* * * * *", + timeZone: "utc", + retryConfig: { + retryCount: 3, + maxRetrySeconds: 10, + minBackoffSeconds: 11, + maxBackoffSeconds: 12, + maxDoublings: 2, + }, + }, + }); + expect(schfn.__requiredAPIs).to.deep.eq([ + { + api: "cloudscheduler.googleapis.com", + reason: "Needed for scheduled functions.", + }, + ]); + }); + + it("should create a schedule function with preserveExternalChanges", () => { + const schfn = schedule.onSchedule( + { + schedule: "* * * * *", + preserveExternalChanges: true, + }, + () => console.log(1) + ); + + expect(schfn.__endpoint).to.deep.eq({ + platform: "gcfv2", + labels: {}, + scheduleTrigger: { + schedule: "* * * * *", + timeZone: undefined, + retryConfig: { + retryCount: undefined, + maxRetrySeconds: undefined, + minBackoffSeconds: undefined, + maxBackoffSeconds: undefined, + maxDoublings: undefined, + }, + }, + }); + expect(schfn.__requiredAPIs).to.deep.eq([ + { + api: 
"cloudscheduler.googleapis.com", + reason: "Needed for scheduled functions.", + }, + ]); + }); + + it("should have a .run method", async () => { + const testObj = { + foo: "bar", + }; + const schfn = schedule.onSchedule("* * * * *", () => { + testObj.foo = "newBar"; + }); + + await schfn.run("input" as any); + + expect(testObj).to.deep.eq({ + foo: "newBar", + }); + }); + + it("calls init function", async () => { + const func = schedule.onSchedule("* * * * *", () => null); + + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + origin: "example.com", + } + ); + req.method = "POST"; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req as any); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v2/providers/storage.spec.ts b/spec/v2/providers/storage.spec.ts new file mode 100644 index 000000000..06324e9ab --- /dev/null +++ b/spec/v2/providers/storage.spec.ts @@ -0,0 +1,734 @@ +import { expect } from "chai"; +import * as config from "../../../src/common/config"; +import * as options from "../../../src/v2/options"; +import * as storage from "../../../src/v2/providers/storage"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT, FULL_OPTIONS, FULL_TRIGGER } from "./fixtures"; +import { CloudEvent, onInit } from "../../../src/v2/core"; + +const EVENT_TRIGGER = { + eventType: "event-type", + resource: "some-bucket", +}; + +const ENDPOINT_EVENT_TRIGGER = { + eventType: "event-type", + eventFilters: { + bucket: "some-bucket", + }, + retry: false, +}; + +const DEFAULT_BUCKET_EVENT_FILTER = { + bucket: "default-bucket", +}; + +const SPECIFIC_BUCKET_EVENT_FILTER = { + bucket: "my-bucket", +}; + +describe("v2/storage", () => { + describe("getOptsAndBucket", () => { + it("should return the default bucket with empty opts", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const [opts, bucket] = storage.getOptsAndBucket({}); + + config.resetCache(); + expect(opts).to.deep.equal({}); + expect(bucket).to.eq("default-bucket"); + }); + + it("should return the default bucket with opts param", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const [opts, bucket] = storage.getOptsAndBucket({ region: "us-west1" }); + + config.resetCache(); + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(bucket).to.eq("default-bucket"); + }); + + it("should return the given bucket", () => { + const [opts, bucket] = storage.getOptsAndBucket("my-bucket"); + + expect(opts).to.deep.equal({}); + expect(bucket).to.eq("my-bucket"); + }); + + it("should return the given bucket and opts", () => { + const [opts, bucket] = storage.getOptsAndBucket({ + bucket: "my-bucket", + region: "us-west1", + }); + + expect(opts).to.deep.equal({ region: "us-west1" }); + expect(bucket).to.eq("my-bucket"); + }); + }); + + describe("onOperation", () => { + beforeEach(() => { + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + options.setGlobalOptions({}); + config.resetCache(); + delete process.env.GCLOUD_PROJECT; + }); + + it("should create a minimal trigger/endpoint with bucket", () => { + const result = storage.onOperation("event-type", "some-bucket", () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: EVENT_TRIGGER, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: ENDPOINT_EVENT_TRIGGER, + }); + }); + + it("should create a minimal 
trigger/endpoint with opts", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const result = storage.onOperation("event-type", { region: "us-west1" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...EVENT_TRIGGER, + resource: "default-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_EVENT_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("should create a minimal trigger with bucket with opts and bucket", () => { + const result = storage.onOperation("event-type", { bucket: "some-bucket" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: EVENT_TRIGGER, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: ENDPOINT_EVENT_TRIGGER, + }); + }); + + it("should create a complex trigger/endpoint with appropriate values", () => { + const result = storage.onOperation( + "event-type", + { + ...FULL_OPTIONS, + bucket: "some-bucket", + }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + ...FULL_TRIGGER, + eventTrigger: EVENT_TRIGGER, + }); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + eventTrigger: ENDPOINT_EVENT_TRIGGER, + }); + }); + + it("should merge options and globalOptions", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + }); + + const result = storage.onOperation( + "event-type", + { + bucket: "some-bucket", + region: "us-west1", + minInstances: 3, + }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + concurrency: 20, + minInstances: 3, + regions: ["us-west1"], + labels: {}, + eventTrigger: EVENT_TRIGGER, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + concurrency: 20, + minInstances: 3, + region: ["us-west1"], + labels: {}, + eventTrigger: ENDPOINT_EVENT_TRIGGER, + }); + }); + }); + + describe("onObjectArchived", () => { + const ARCHIVED_TRIGGER = { + ...EVENT_TRIGGER, + eventType: storage.archivedEvent, + }; + const ENDPOINT_ARCHIVED_TRIGGER = { + ...ENDPOINT_EVENT_TRIGGER, + eventType: storage.archivedEvent, + }; + + afterEach(() => { + config.resetCache(); + }); + + it("should accept only handler", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const result = storage.onObjectArchived(() => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ARCHIVED_TRIGGER, + resource: "default-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_ARCHIVED_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept bucket and handler", () => { + const result = storage.onObjectArchived("my-bucket", () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ARCHIVED_TRIGGER, + resource: "my-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_ARCHIVED_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should 
accept opts and handler", () => { + const result = storage.onObjectArchived( + { bucket: "my-bucket", region: "us-west1" }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ARCHIVED_TRIGGER, + resource: "my-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_ARCHIVED_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("should accept opts and handler, default bucket", () => { + config.resetCache({ storageBucket: "default-bucket" }); + const result = storage.onObjectArchived({ region: "us-west1" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ARCHIVED_TRIGGER, + resource: "default-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_ARCHIVED_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await storage.onObjectArchived("bucket", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onObjectFinalized", () => { + const FINALIZED_TRIGGER = { + ...EVENT_TRIGGER, + eventType: storage.finalizedEvent, + }; + const ENDPOINT_FINALIZED_TRIGGER = { + ...ENDPOINT_EVENT_TRIGGER, + eventType: storage.finalizedEvent, + }; + + afterEach(() => { + config.resetCache(); + }); + + it("should accept only handler", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const result = storage.onObjectFinalized(() => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...FINALIZED_TRIGGER, + resource: "default-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_FINALIZED_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept bucket and handler", () => { + const result = storage.onObjectFinalized("my-bucket", () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...FINALIZED_TRIGGER, + resource: "my-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_FINALIZED_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept opts and handler", () => { + const result = storage.onObjectFinalized( + { bucket: "my-bucket", region: "us-west1" }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...FINALIZED_TRIGGER, + resource: "my-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_FINALIZED_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("should accept opts and handler, default bucket", () => { + 
config.resetCache({ storageBucket: "default-bucket" }); + const result = storage.onObjectFinalized({ region: "us-west1" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...FINALIZED_TRIGGER, + resource: "default-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_FINALIZED_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await storage.onObjectFinalized("bucket", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onObjectDeleted", () => { + const DELETED_TRIGGER = { + ...EVENT_TRIGGER, + eventType: storage.deletedEvent, + }; + const ENDPOINT_DELETED_TRIGGER = { + ...ENDPOINT_EVENT_TRIGGER, + eventType: storage.deletedEvent, + }; + + afterEach(() => { + config.resetCache(); + }); + + it("should accept only handler", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const result = storage.onObjectDeleted(() => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...DELETED_TRIGGER, + resource: "default-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_DELETED_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept bucket and handler", () => { + const result = storage.onObjectDeleted("my-bucket", () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...DELETED_TRIGGER, + resource: "my-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_DELETED_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept opts and handler", () => { + const result = storage.onObjectDeleted({ bucket: "my-bucket", region: "us-west1" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...DELETED_TRIGGER, + resource: "my-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_DELETED_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("should accept opts and handler, default bucket", () => { + config.resetCache({ storageBucket: "default-bucket" }); + const result = storage.onObjectDeleted({ region: "us-west1" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...DELETED_TRIGGER, + resource: "default-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_DELETED_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: 
"id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await storage.onObjectDeleted("bucket", () => null)(event); + expect(hello).to.equal("world"); + }); + }); + + describe("onObjectMetadataUpdated", () => { + const METADATA_TRIGGER = { + ...EVENT_TRIGGER, + eventType: storage.metadataUpdatedEvent, + }; + const ENDPOINT_METADATA_TRIGGER = { + ...ENDPOINT_EVENT_TRIGGER, + eventType: storage.metadataUpdatedEvent, + }; + + afterEach(() => { + config.resetCache(); + }); + + it("should accept only handler", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const result = storage.onObjectMetadataUpdated(() => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...METADATA_TRIGGER, + resource: "default-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_METADATA_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept bucket and handler", () => { + const result = storage.onObjectMetadataUpdated("my-bucket", () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...METADATA_TRIGGER, + resource: "my-bucket", + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_METADATA_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + }); + }); + + it("should accept opts and handler", () => { + const result = storage.onObjectMetadataUpdated( + { bucket: "my-bucket", region: "us-west1" }, + () => 42 + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...METADATA_TRIGGER, + resource: "my-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_METADATA_TRIGGER, + eventFilters: SPECIFIC_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("should accept opts and handler, default bucket", () => { + config.resetCache({ storageBucket: "default-bucket" }); + + const result = storage.onObjectMetadataUpdated({ region: "us-west1" }, () => 42); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...METADATA_TRIGGER, + resource: "default-bucket", + }, + regions: ["us-west1"], + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + ...ENDPOINT_METADATA_TRIGGER, + eventFilters: DEFAULT_BUCKET_EVENT_FILTER, + }, + region: ["us-west1"], + }); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await storage.onObjectMetadataUpdated("bucket", () => null)(event); + expect(hello).to.equal("world"); + }); + }); +}); diff --git a/spec/v2/providers/tasks.spec.ts b/spec/v2/providers/tasks.spec.ts new file mode 100644 index 000000000..46ffd7a0a --- /dev/null +++ b/spec/v2/providers/tasks.spec.ts @@ -0,0 +1,324 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, 
to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { expect } from "chai"; + +import { ManifestEndpoint } from "../../../src/runtime/manifest"; +import * as options from "../../../src/v2/options"; +import { onTaskDispatched, Request } from "../../../src/v2/providers/tasks"; +import { MockRequest } from "../../fixtures/mockrequest"; +import { runHandler } from "../../helper"; +import { FULL_ENDPOINT, MINIMAL_V2_ENDPOINT, FULL_OPTIONS, FULL_TRIGGER } from "./fixtures"; +import { onInit } from "../../../src/v2/core"; + +const MINIMIAL_TASK_QUEUE_TRIGGER: ManifestEndpoint["taskQueueTrigger"] = { + rateLimits: { + maxConcurrentDispatches: options.RESET_VALUE, + maxDispatchesPerSecond: options.RESET_VALUE, + }, + retryConfig: { + maxAttempts: options.RESET_VALUE, + maxBackoffSeconds: options.RESET_VALUE, + maxDoublings: options.RESET_VALUE, + maxRetrySeconds: options.RESET_VALUE, + minBackoffSeconds: options.RESET_VALUE, + }, +}; + +describe("onTaskDispatched", () => { + beforeEach(() => { + options.setGlobalOptions({}); + process.env.GCLOUD_PROJECT = "aProject"; + }); + + afterEach(() => { + delete process.env.GCLOUD_PROJECT; + }); + + it("should return a minimal trigger/endpoint with appropriate values", () => { + const result = onTaskDispatched(() => undefined); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + taskQueueTrigger: {}, + labels: {}, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + taskQueueTrigger: MINIMIAL_TASK_QUEUE_TRIGGER, + }); + }); + + it("should take globalOptions invoker", () => { + options.setGlobalOptions({ + invoker: "private", + }); + + const result = onTaskDispatched(() => undefined); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + taskQueueTrigger: { + invoker: ["private"], + }, + labels: {}, + }); + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + taskQueueTrigger: { + ...MINIMIAL_TASK_QUEUE_TRIGGER, + invoker: ["private"], + }, + }); + }); + + it("should create a complex trigger/endpoint with appropriate values", () => { + const result = onTaskDispatched( + { + ...FULL_OPTIONS, + retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + maxDoublings: 3, + minBackoffSeconds: 1, + maxBackoffSeconds: 2, + }, + rateLimits: { + maxConcurrentDispatches: 5, + maxDispatchesPerSecond: 10, + }, + invoker: "private", + }, + () => undefined + ); + + expect(result.__trigger).to.deep.equal({ + ...FULL_TRIGGER, + taskQueueTrigger: { + 
retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + maxDoublings: 3, + minBackoffSeconds: 1, + maxBackoffSeconds: 2, + }, + rateLimits: { + maxConcurrentDispatches: 5, + maxDispatchesPerSecond: 10, + }, + invoker: ["private"], + }, + }); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + taskQueueTrigger: { + retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + maxDoublings: 3, + minBackoffSeconds: 1, + maxBackoffSeconds: 2, + }, + rateLimits: { + maxConcurrentDispatches: 5, + maxDispatchesPerSecond: 10, + }, + invoker: ["private"], + }, + }); + }); + + it("should return a minimal endpoint without preserveExternalChanges set", () => { + const result = onTaskDispatched( + { + retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + }, + rateLimits: { + maxDispatchesPerSecond: 10, + }, + }, + () => undefined + ); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + taskQueueTrigger: { + retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + maxBackoffSeconds: options.RESET_VALUE, + maxDoublings: options.RESET_VALUE, + minBackoffSeconds: options.RESET_VALUE, + }, + rateLimits: { + maxDispatchesPerSecond: 10, + maxConcurrentDispatches: options.RESET_VALUE, + }, + }, + }); + }); + + it("should create a complex endpoint with preserveExternalChanges set", () => { + const result = onTaskDispatched( + { + ...FULL_OPTIONS, + retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + }, + rateLimits: { + maxDispatchesPerSecond: 10, + }, + invoker: "private", + preserveExternalChanges: true, + }, + () => undefined + ); + + expect(result.__endpoint).to.deep.equal({ + ...FULL_ENDPOINT, + platform: "gcfv2", + taskQueueTrigger: { + retryConfig: { + maxAttempts: 4, + maxRetrySeconds: 10, + }, + rateLimits: { + maxDispatchesPerSecond: 10, + }, + invoker: ["private"], + }, + }); + }); + + it("should merge options and globalOptions", () => { + options.setGlobalOptions({ + concurrency: 20, + region: "europe-west1", + minInstances: 1, + }); + + const result = onTaskDispatched( + { + region: "us-west1", + minInstances: 3, + }, + () => undefined + ); + + expect(result.__trigger).to.deep.equal({ + platform: "gcfv2", + taskQueueTrigger: {}, + concurrency: 20, + minInstances: 3, + regions: ["us-west1"], + labels: {}, + }); + + expect(result.__endpoint).to.deep.equal({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + concurrency: 20, + minInstances: 3, + region: ["us-west1"], + labels: {}, + taskQueueTrigger: MINIMIAL_TASK_QUEUE_TRIGGER, + }); + }); + + it("has a .run method", async () => { + const request: any = { data: "data" }; + const cf = onTaskDispatched((r) => { + expect(r.data).to.deep.equal(request.data); + }); + + await cf.run(request); + }); + + it("should be an express handler", async () => { + const func = onTaskDispatched(() => undefined); + + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + authorization: "Bearer abc", + origin: "example.com", + } + ); + req.method = "POST"; + + const resp = await runHandler(func, req as any); + expect(resp.status).to.equal(204); + }); + + // These tests pass if the code transpiles + it("allows desirable syntax", () => { + onTaskDispatched((request: Request) => { + // There should be no lint warnings that data is not a string. 
+ console.log(`hello, ${request.data}`); + }); + onTaskDispatched((request: Request) => { + console.log(`hello, ${request.data}`); + }); + onTaskDispatched((request: Request) => { + console.log(`hello, ${request.data}`); + }); + onTaskDispatched((request: Request) => { + console.log(`Hello, ${request.data}`); + }); + }); + + it("calls init function", async () => { + const func = onTaskDispatched(() => null); + + const req = new MockRequest( + { + data: {}, + }, + { + "content-type": "application/json", + origin: "example.com", + } + ); + req.method = "POST"; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await runHandler(func, req as any); + expect(hello).to.equal("world"); + }); +}); diff --git a/spec/v2/providers/testLab.spec.ts b/spec/v2/providers/testLab.spec.ts new file mode 100644 index 000000000..15d649d44 --- /dev/null +++ b/spec/v2/providers/testLab.spec.ts @@ -0,0 +1,95 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
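[Editor's illustrative sketch, not part of the diff] The tasks spec above asserts on retryConfig, rateLimits, and invoker options and on request.data reaching the handler. A minimal usage sketch of onTaskDispatched with those same options follows; the import path and the payload shape are assumptions for illustration only.

// Minimal sketch (assumed import path); options mirror those asserted above.
import { onTaskDispatched } from "firebase-functions/v2/tasks";

export const processUpload = onTaskDispatched(
  {
    retryConfig: { maxAttempts: 4, maxRetrySeconds: 10 },
    rateLimits: { maxDispatchesPerSecond: 10 },
    invoker: "private",
  },
  async (request) => {
    // request.data carries whatever payload was enqueued on the task queue.
    console.log("handling task", request.data);
  }
);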
+ +import { expect } from "chai"; +import * as testLab from "../../../src/v2/providers/testLab"; +import * as options from "../../../src/v2/options"; +import { MINIMAL_V2_ENDPOINT } from "../../fixtures"; +import { CloudEvent, onInit } from "../../../src/v2/core"; + +describe("onTestMatrixCompleted", () => { + afterEach(() => { + options.setGlobalOptions({}); + }); + + it("should create a function with a handler", () => { + const fn = testLab.onTestMatrixCompleted(() => 2); + + expect(fn.__endpoint).to.deep.eq({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + labels: {}, + eventTrigger: { + eventType: testLab.eventType, + eventFilters: {}, + retry: false, + }, + }); + expect(fn.run(1 as any)).to.eq(2); + }); + + it("should create a function with opts and a handler", () => { + options.setGlobalOptions({ + memory: "512MiB", + region: "us-west1", + }); + + const fn = testLab.onTestMatrixCompleted( + { + region: "us-central1", + retry: true, + }, + () => 2 + ); + + expect(fn.__endpoint).to.deep.eq({ + ...MINIMAL_V2_ENDPOINT, + platform: "gcfv2", + availableMemoryMb: 512, + region: ["us-central1"], + labels: {}, + eventTrigger: { + eventType: testLab.eventType, + eventFilters: {}, + retry: true, + }, + }); + expect(fn.run(1 as any)).to.eq(2); + }); + + it("calls init function", async () => { + const event: CloudEvent = { + specversion: "1.0", + id: "id", + source: "source", + type: "type", + time: "now", + data: "data", + }; + + let hello; + onInit(() => (hello = "world")); + expect(hello).to.be.undefined; + await testLab.onTestMatrixCompleted(() => null)(event); + expect(hello).to.equal("world"); + }); +}); diff --git a/src/apps.ts b/src/apps.ts deleted file mode 100644 index 37d7febbc..000000000 --- a/src/apps.ts +++ /dev/null @@ -1,118 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. 
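[Editor's illustrative sketch, not part of the diff] The testLab spec above verifies onTestMatrixCompleted with a bare handler and with options. A minimal usage sketch follows; the import path and the contents of event.data are assumptions, not verified by this diff.

// Minimal sketch (assumed import path); handler form matches the spec above.
import { onTestMatrixCompleted } from "firebase-functions/v2/testLab";

export const reportMatrixResult = onTestMatrixCompleted((event) => {
  // event.data describes the completed Test Lab test matrix.
  console.log("test matrix completed", event.data);
});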
- -import * as _ from 'lodash'; -import * as firebase from 'firebase-admin'; -import { firebaseConfig } from './config'; - -/** @internal */ -export function apps(): apps.Apps { - if (typeof apps.singleton === 'undefined') { - apps.init(); - } - return apps.singleton; -} - -/** @internal */ -export namespace apps { - /** @internal */ - export const garbageCollectionInterval = 2 * 60 * 1000; - - /** @internal */ - export function delay(delay: number) { - return new Promise(resolve => { - setTimeout(resolve, delay); - }); - } - - export let singleton: apps.Apps; - - export let init = () => singleton = new Apps(); - - export interface AuthMode { - admin: boolean; - variable?: any; - } - - /** @internal */ - export interface RefCounter { - [appName: string]: number; - } - - /** @internal */ - export class Apps { - private _refCounter: RefCounter; - - constructor() { - this._refCounter = {}; - } - - _appAlive(appName: string): boolean { - try { - let app = firebase.app(appName); - return !_.get(app, 'isDeleted_'); - } catch (e) { - return false; - } - } - - _destroyApp(appName: string) { - if (!this._appAlive(appName)) { - return; - } - firebase.app(appName).delete().catch(_.noop); - } - - retain() { - let increment = (n?: number) => { - return (n || 0) + 1; - }; - // Increment counter for admin because function might use event.data.ref - _.update(this._refCounter, '__admin__', increment); - } - - release() { - let decrement = (n: number) => { - return n - 1; - }; - return delay(garbageCollectionInterval).then(() => { - _.update(this._refCounter, '__admin__', decrement); - _.forEach(this._refCounter, (count, key) => { - if (count <= 0) { - this._destroyApp(key); - } - }); - }); - } - - get admin(): firebase.app.App { - if (this._appAlive('__admin__')) { - return firebase.app('__admin__'); - } - return firebase.initializeApp(this.firebaseArgs, '__admin__'); - } - - private get firebaseArgs() { - return _.assign({}, firebaseConfig(), {credential: firebase.credential.applicationDefault()}); - } - } -} diff --git a/src/bin/firebase-functions.ts b/src/bin/firebase-functions.ts new file mode 100644 index 000000000..73f1b9caa --- /dev/null +++ b/src/bin/firebase-functions.ts @@ -0,0 +1,130 @@ +#!/usr/bin/env node + +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
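[Editor's illustrative sketch, not part of the diff] The deleted src/apps.ts above ref-counted a shared admin app: retain() bumps the __admin__ counter and release() waits garbageCollectionInterval (two minutes) before decrementing and deleting apps whose count reaches zero. The wrapper below is an assumed illustration of that call pattern; only retain(), release(), and the grace period come from the removed code.

// Sketch of the removed module's intended call pattern (wrapper is assumed).
import { apps } from "./apps";

async function withAdminApp<T>(work: () => Promise<T>): Promise<T> {
  apps().retain(); // bump the __admin__ ref count so the shared app stays alive
  try {
    return await work();
  } finally {
    // release() waits the two-minute grace period, then decrements and deletes
    // any app whose count has dropped to zero.
    void apps().release();
  }
}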
+ +import * as http from "http"; +import express from "express"; +import fs from "fs/promises"; +import * as path from "path"; +import { loadStack } from "../runtime/loader"; +import { stackToWire } from "../runtime/manifest"; + +function printUsageAndExit() { + console.error( + ` +Usage: firebase-functions [functionsDir] + +Arguments: + - functionsDir: Directory containing source code for Firebase Functions. +` + ); + process.exit(1); +} + +let functionsDir = "."; + +const args = process.argv.slice(2); +if (args.length > 1) { + if (args[0] === "-h" || args[0] === "--help") { + printUsageAndExit(); + } + functionsDir = args[0]; +} + +function handleQuitquitquit(req: express.Request, res: express.Response, server: http.Server) { + res.send("ok"); + server.close(); +} + +if (process.env.FUNCTIONS_MANIFEST_OUTPUT_PATH) { + void (async () => { + const outputPath = process.env.FUNCTIONS_MANIFEST_OUTPUT_PATH; + try { + // Validate the output path + const dir = path.dirname(outputPath); + try { + await fs.access(dir, fs.constants.W_OK); + } catch (e) { + console.error( + `Error: Cannot write to directory '${dir}': ${e instanceof Error ? e.message : String(e)}` + ); + console.error("Please ensure the directory exists and you have write permissions."); + process.exit(1); + } + + const stack = await loadStack(functionsDir); + const wireFormat = stackToWire(stack); + await fs.writeFile(outputPath, JSON.stringify(wireFormat, null, 2)); + process.exit(0); + } catch (e: any) { + if (e.code === "ENOENT") { + console.error(`Error: Directory '${path.dirname(outputPath)}' does not exist.`); + console.error("Please create the directory or specify a valid path."); + } else if (e.code === "EACCES") { + console.error(`Error: Permission denied writing to '${outputPath}'.`); + console.error("Please check file permissions or choose a different location."); + } else if (e.message?.includes("Failed to generate manifest")) { + console.error(e.message); + } else { + console.error( + `Failed to generate manifest from function source: ${ + e instanceof Error ? e.message : String(e) + }` + ); + } + if (e instanceof Error && e.stack) { + console.error(e.stack); + } + process.exit(1); + } + })(); +} else { + let server: http.Server = undefined; + const app = express(); + + app.get("/__/quitquitquit", (req, res) => handleQuitquitquit(req, res, server)); + app.post("/__/quitquitquit", (req, res) => handleQuitquitquit(req, res, server)); + + if (process.env.FUNCTIONS_CONTROL_API === "true") { + // eslint-disable-next-line @typescript-eslint/no-misused-promises + app.get("/__/functions.yaml", async (req, res) => { + try { + const stack = await loadStack(functionsDir); + res.setHeader("content-type", "text/yaml"); + res.send(JSON.stringify(stackToWire(stack))); + } catch (e) { + console.error(e); + const errorMessage = e instanceof Error ? 
e.message : String(e); + res.status(400).send(`Failed to generate manifest from function source: ${errorMessage}`); + } + }); + } + + let port = 8080; + if (process.env.PORT) { + port = Number.parseInt(process.env.PORT); + } + + console.log("Serving at port", port); + server = app.listen(port); +} diff --git a/src/cloud-functions.ts b/src/cloud-functions.ts deleted file mode 100644 index 3717f8ca1..000000000 --- a/src/cloud-functions.ts +++ /dev/null @@ -1,311 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { apps } from './apps'; -import * as _ from 'lodash'; -import { Request, Response } from 'express'; -export { Request, Response }; -const WILDCARD_REGEX = new RegExp('{[^/{}]*}', 'g'); - -/** Legacy wire format for an event - * @internal - */ -export interface LegacyEvent { - data: any; - eventType?: string; - resource?: string; - eventId?: string; - timestamp?: string; - params?: { [option: string]: any }; - auth?: apps.AuthMode; -} - -/** Wire format for an event - * @internal - */ -export interface Event { - context: { - eventId: string; - timestamp: string; - eventType: string; - resource: Resource; - }; - data: any; -} - -/** The context in which an event occurred. - * An EventContext describes: - * - The time an event occurred. - * - A unique identifier of the event. - * - The resource on which the event occurred, if applicable. - * - Authorization of the request that triggered the event, if applicable and available. - */ -export interface EventContext { - /** ID of the event */ - eventId: string; - /** Timestamp for when the event occured (ISO string) */ - timestamp: string; - /** Type of event */ - eventType: string; - /** Resource that triggered the event */ - resource: Resource; - /** Key-value pairs that represent the values of wildcards in a database reference */ - params: { [option: string]: any }; // added by SDK, but may be {} - /** Type of authentication for the triggering action, valid value are: 'ADMIN', 'USER', - * 'UNAUTHENTICATED'. Only available for database functions. - */ - authType?: 'ADMIN' | 'USER' | 'UNAUTHENTICATED'; - /** Firebase auth variable for the user whose action triggered the function. Field will be - * null for unauthenticated users, and will not exist for admin users. Only available - * for database functions. 
- */ - auth?: { - uid: string, - token: object, - }; -} - -/** Change describes a change of state - "before" represents the state prior - * to the event, "after" represents the state after the event. - */ -export class Change { - constructor( - public before?: T, - public after?: T, - ) {}; -} - -/** ChangeJson is the JSON format used to construct a Change object. */ -export interface ChangeJson { - /** Key-value pairs representing state of data before the change. - * If `fieldMask` is set, then only fields that changed are present in `before`. - */ - before?: any; - /** Key-value pairs representing state of data after the change. */ - after?: any; - /** Comma-separated string that represents names of field that changed. */ - fieldMask?: string; -} - -export namespace Change { - function reinterpretCast(x: any) { return x as T; } - - /** Factory method for creating a Change from a `before` object and an `after` object. */ - export function fromObjects(before: T, after: T) { - return new Change(before, after); - } - - /** Factory method for creating a Change from a JSON and an optional customizer function to be - * applied to both the `before` and the `after` fields. - */ - export function fromJSON(json: ChangeJson, customizer: (x: any) => T = reinterpretCast): Change { - let before = _.assign({}, json.before); - if (json.fieldMask) { - before = applyFieldMask(before, json.after, json.fieldMask); - } - return Change.fromObjects(customizer(before || {}), customizer(json.after || {})); - } - - /** @internal */ - export function applyFieldMask(sparseBefore: any, after: any, fieldMask: string) { - let before = _.assign({}, after); - let masks = fieldMask.split(','); - _.forEach(masks, mask => { - const val = _.get(sparseBefore, mask); - if (typeof val === 'undefined') { - _.unset(before, mask); - } else { - _.set(before, mask, val); - } - }); - return before; - } -} - -/** Resource is a standard format for defining a resource (google.rpc.context.AttributeContext.Resource). - * In Cloud Functions, it is the resource that triggered the function - such as a storage bucket. - */ -export interface Resource { - service: string; - name: string; - type?: string; - labels?: { [tag: string]: string }; -} - -/** TriggerAnnotated is used internally by the firebase CLI to understand what type of Cloud Function to deploy. */ -export interface TriggerAnnotated { - __trigger: { - httpsTrigger?: {}, - eventTrigger?: { - eventType: string; - resource: string; - service: string; - }, - labels?: { [key: string]: string } - }; -} - -/** A Runnable has a `run` method which directly invokes the user-defined function - useful for unit testing. */ -export interface Runnable { - run: (data: T, context: EventContext) => PromiseLike | any; -} - -/** - * An HttpsFunction is both an object that exports its trigger definitions at __trigger and - * can be called as a function that takes an express.js Request and Response object. - */ -export type HttpsFunction = TriggerAnnotated & ((req: Request, resp: Response) => void); - -/** - * A CloudFunction is both an object that exports its trigger definitions at __trigger and - * can be called as a function using the raw JS API for Google Cloud Functions. - */ -export type CloudFunction = Runnable & TriggerAnnotated & ((input: any) => PromiseLike | any); - -/** @internal */ -export interface MakeCloudFunctionArgs { - // TODO should remove `provider` and require a fully qualified `eventType` - // once all providers have migrated to new format. 
- provider: string; - eventType: string; - triggerResource: () => string; - service: string; - dataConstructor?: (raw: Event | LegacyEvent) => EventData; - handler: (data: EventData, context: EventContext) => PromiseLike | any; - before?: (raw: Event | LegacyEvent) => void; - after?: (raw: Event | LegacyEvent) => void; - legacyEventType?: string; -} - -/** @internal */ -export function makeCloudFunction({ - provider, - eventType, - triggerResource, - service, - dataConstructor = (raw: Event | LegacyEvent) => raw.data, - handler, - before = () => { return; }, - after = () => { return; }, - legacyEventType, -}: MakeCloudFunctionArgs): CloudFunction { - let cloudFunction: any = async (event: Event | LegacyEvent) => { - if (!_.has(event, 'data')) { - throw Error('Cloud function needs to be called with an event parameter.' + - 'If you are writing unit tests, please use the Node module firebase-functions-fake.'); - } - try { - before(event); - - let dataOrChange = dataConstructor(event); - let context: any; - if (isEvent(event)) { // new event format - context = _.cloneDeep(event.context); - } else { // legacy event format - context = { - eventId: event.eventId, - timestamp: event.timestamp, - eventType: provider + '.' + eventType, - resource: { - service: service, - name: event.resource, - }, - }; - if (provider === 'google.firebase.database') { - context.authType = _detectAuthType(event); - if (context.authType !== 'ADMIN') { - context.auth = _makeAuth(event, context.authType); - } - } - } - - context.params = _makeParams(context, triggerResource); - - let promise = handler(dataOrChange, context); - if (typeof promise === 'undefined') { - console.warn('Function returned undefined, expected Promise or value'); - } - return await promise; - } finally { - after(event); - } - }; - Object.defineProperty(cloudFunction, '__trigger', { - get: () => { - return { - eventTrigger: { - resource: triggerResource(), - eventType: legacyEventType || provider + '.' + eventType, - service, - }, - }; - }, - }); - cloudFunction.run = handler; - return cloudFunction; -} - -function isEvent(event: Event | LegacyEvent): event is Event { - return _.has(event, 'context'); -} - -function _makeParams(context: EventContext, triggerResourceGetter: () => string): { [option: string]: any } { - if (context.params) { // In unit testing, user may directly provide `context.params`. - return context.params; - } - if (!context.resource) { // In unit testing, `resource` may be unpopulated for a test event. 
- return {}; - } - let triggerResource = triggerResourceGetter(); - let wildcards = triggerResource.match(WILDCARD_REGEX); - let params: { [option: string]: any } = {}; - if (wildcards) { - let triggerResourceParts = _.split(triggerResource, '/'); - let eventResourceParts = _.split(context.resource.name, '/'); - _.forEach(wildcards, wildcard => { - let wildcardNoBraces = wildcard.slice(1,-1); - let position = _.indexOf(triggerResourceParts, wildcard); - params[wildcardNoBraces] = eventResourceParts[position]; - }); - } - return params; -} - -function _makeAuth(event: LegacyEvent, authType: string) { - if (authType === 'UNAUTHENTICATED') { - return null; - } - return { - uid: _.get(event, 'auth.variable.uid'), - token: _.get(event, 'auth.variable.token'), - }; -} - -function _detectAuthType(event: LegacyEvent) { - if (_.get(event, 'auth.admin')) { - return 'ADMIN'; - } - if (_.has(event, 'auth.variable')) { - return 'USER'; - } - return 'UNAUTHENTICATED'; -} diff --git a/src/common/app.ts b/src/common/app.ts new file mode 100644 index 000000000..44f7ca929 --- /dev/null +++ b/src/common/app.ts @@ -0,0 +1,69 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { + App, + applicationDefault, + deleteApp, + getApp as getAppNamed, + initializeApp, +} from "firebase-admin/app"; +import { firebaseConfig } from "./config"; + +const APP_NAME = "__FIREBASE_FUNCTIONS_SDK__"; + +let cache: App; +export function getApp(): App { + if (typeof cache === "undefined") { + try { + cache = getAppNamed(/* default */); + } catch { + // Default app does not exist. Initialize app. + cache = initializeApp( + { + ...firebaseConfig(), + credential: applicationDefault(), + }, + APP_NAME + ); + } + } + return cache; +} + +/** + * This function allows the Firebase Emulator Suite to override the FirebaseApp instance + * used by the Firebase Functions SDK. Developers should never call this function for + * other purposes. + * N.B. For clarity for use in testing this name has no mention of emulation, but + * it must be exported from index as app.setEmulatedAdminApp or we break the emulator. + * We can remove this export when: + * A) We complete the new emulator and no longer depend on monkeypatching + * B) We tweak the CLI to look for different APIs to monkeypatch depending on versions. 
+ * @alpha + */ +export function setApp(app?: App) { + if (cache?.name === APP_NAME) { + void deleteApp(cache); + } + cache = app; +} diff --git a/src/common/change.ts b/src/common/change.ts new file mode 100644 index 000000000..d81b7c1fb --- /dev/null +++ b/src/common/change.ts @@ -0,0 +1,96 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * `ChangeJson` is the JSON format used to construct a `Change` object. + */ +export interface ChangeJson { + /** + * Key-value pairs representing state of data after the change. + */ + after?: any; + /** + * Key-value pairs representing state of data before the change. If + * `fieldMask` is set, then only fields that changed are present in `before`. + */ + before?: any; + /** + * Comma-separated string that represents names of fields that changed. + */ + fieldMask?: string; +} + +/** @internal */ +export function applyFieldMask(sparseBefore: any, after: any, fieldMask: string) { + const before = { ...after }; + const masks = fieldMask.split(","); + + for (const mask of masks) { + const parts = mask.split("."); + const head = parts[0]; + const tail = parts.slice(1).join("."); + if (parts.length > 1) { + before[head] = applyFieldMask(sparseBefore?.[head], after[head], tail); + continue; + } + const val = sparseBefore?.[head]; + if (typeof val === "undefined") { + delete before[mask]; + } else { + before[mask] = val; + } + } + + return before; +} + +/** + * The Cloud Functions interface for events that change state, such as + * Realtime Database or Cloud Firestore `onWrite` and `onUpdate` events. + * + * For more information about the format used to construct `Change` objects, see + * {@link ChangeJson} below. + * + */ +export class Change { + /** + * Factory method for creating a `Change` from a `before` object and an `after` + * object. + */ + static fromObjects(before: T, after: T) { + return new Change(before, after); + } + + /** + * Factory method for creating a `Change` from JSON and an optional customizer + * function to be applied to both the `before` and the `after` fields. 
+ */ + static fromJSON(json: ChangeJson, customizer: (x: any) => T = (x) => x as T): Change { + let before = { ...json.before }; + if (json.fieldMask) { + before = applyFieldMask(before, json.after, json.fieldMask); + } + + return Change.fromObjects(customizer(before || {}), customizer(json.after || {})); + } + constructor(public before: T, public after: T) {} +} diff --git a/src/common/config.ts b/src/common/config.ts new file mode 100644 index 000000000..bf54218ce --- /dev/null +++ b/src/common/config.ts @@ -0,0 +1,57 @@ +import { AppOptions } from "firebase-admin/app"; +import fs from "fs"; +import * as path from "path"; + +import * as logger from "../logger"; + +let cache: AppOptions | null = null; + +/** + * @internal + * @alpha + */ +export function resetCache(newCache: AppOptions = null) { + cache = newCache; +} + +/** + * Get the fields you need to initialize a Firebase app + * @alpha + */ +export function firebaseConfig(): AppOptions | null { + if (cache) { + return cache; + } + + let env = process.env.FIREBASE_CONFIG; + if (env) { + // Firebase Tools will always use a JSON blob in prod, but docs + // explicitly state that the user can set the env to a file: + // https://firebase.google.com/docs/admin/setup#initialize-without-parameters + if (!env.startsWith("{")) { + env = fs.readFileSync(path.join(process.env.PWD, env)).toString("utf8"); + } + + cache = JSON.parse(env); + return cache; + } + + if (process.env.GCLOUD_PROJECT) { + logger.warn( + "Warning, estimating Firebase Config based on GCLOUD_PROJECT. Initializing firebase-admin may fail" + ); + cache = { + databaseURL: + process.env.DATABASE_URL || `https://${process.env.GCLOUD_PROJECT}.firebaseio.com`, + storageBucket: process.env.STORAGE_BUCKET_URL || `${process.env.GCLOUD_PROJECT}.appspot.com`, + projectId: process.env.GCLOUD_PROJECT, + }; + return cache; + } else { + logger.warn( + "Warning, FIREBASE_CONFIG and GCLOUD_PROJECT environment variables are missing. Initializing firebase-admin will fail" + ); + } + + return null; +} diff --git a/src/common/debug.ts b/src/common/debug.ts new file mode 100644 index 000000000..5170fe7ff --- /dev/null +++ b/src/common/debug.ts @@ -0,0 +1,57 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// Do NOT turn on a debug feature in production. 
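// (Editor's note, not part of the original diff.) These flags are presumably set
// by local tooling such as the emulator rather than by user code, e.g.:
//   FIREBASE_DEBUG_MODE=true
//   FIREBASE_DEBUG_FEATURES={"skipTokenVerification":true,"enableCors":true}
// loadDebugFeatures() below parses FIREBASE_DEBUG_FEATURES as JSON and falls
// back to {} when debug mode is off or the value is not a JSON object.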
+const debugMode = process.env.FIREBASE_DEBUG_MODE === "true"; + +interface DebugFeatures { + skipTokenVerification?: boolean; + enableCors?: boolean; +} + +function loadDebugFeatures(): DebugFeatures { + if (!debugMode) { + return {}; + } + try { + const obj = JSON.parse(process.env.FIREBASE_DEBUG_FEATURES); + if (typeof obj !== "object") { + return {}; + } + return obj as DebugFeatures; + } catch (_e) { + return {}; + } +} + +/* @internal */ +export function debugFeatureValue(feat: keyof DebugFeatures): unknown { + if (!debugMode) { + return; + } + return loadDebugFeatures()[feat]; +} + +/* @internal */ +export function isDebugFeatureEnabled(feat: keyof DebugFeatures): boolean { + return debugMode && !!debugFeatureValue(feat); +} diff --git a/src/common/encoding.ts b/src/common/encoding.ts new file mode 100644 index 000000000..70f2f93c4 --- /dev/null +++ b/src/common/encoding.ts @@ -0,0 +1,120 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { Expression } from "../params"; + +// Copied from firebase-tools/src/gcp/proto + +/** + * A type alias used to annotate interfaces as using a google.protobuf.Duration. + * This type is parsed/encoded as a string of seconds + the "s" prefix. + */ +export type Duration = string; + +/** Get a google.protobuf.Duration for a number of seconds. */ +export function durationFromSeconds(s: number): Duration { + return `${s}s`; +} + +/** + * Utility function to help copy fields from type A to B. + * As a safety net, catches typos or fields that aren't named the same + * in A and B, but cannot verify that both Src and Dest have the same type for the same field. 
+ */ +export function copyIfPresent( + dest: Dest, + src: Src, + ...fields: Array +) { + if (!src) { + return; + } + for (const field of fields) { + if (!Object.prototype.hasOwnProperty.call(src, field)) { + continue; + } + dest[field] = src[field] as any; + } +} + +export function convertIfPresent( + dest: Dest, + src: Src, + destField: keyof Dest, + srcField: keyof Src, + converter: (from: any) => any = (from: any) => { + return from; + } +) { + if (!src) { + return; + } + if (!Object.prototype.hasOwnProperty.call(src, srcField)) { + return; + } + dest[destField] = converter(src[srcField]); +} + +export function serviceAccountFromShorthand( + serviceAccount: string | Expression +): string | Expression | null { + if (serviceAccount === "default") { + return null; + } else if (serviceAccount instanceof Expression) { + return serviceAccount; + } else if (serviceAccount.endsWith("@")) { + if (!process.env.GCLOUD_PROJECT) { + throw new Error( + `Unable to determine email for service account '${serviceAccount}' because process.env.GCLOUD_PROJECT is not set.` + ); + } + return `${serviceAccount}${process.env.GCLOUD_PROJECT}.iam.gserviceaccount.com`; + } else if (serviceAccount.includes("@")) { + return serviceAccount; + } else { + throw new Error( + `Invalid option for serviceAccount: '${serviceAccount}'. Valid options are 'default', a service account email, or '{serviceAccountName}@'` + ); + } +} + +export function convertInvoker(invoker: string | string[]): string[] { + if (typeof invoker === "string") { + invoker = [invoker]; + } + + if (invoker.length === 0) { + throw new Error("Invalid option for invoker: Must be a non-empty array."); + } + + if (invoker.find((inv) => inv.length === 0)) { + throw new Error("Invalid option for invoker: Must be a non-empty string."); + } + + if (invoker.length > 1 && invoker.find((inv) => inv === "public" || inv === "private")) { + throw new Error( + "Invalid option for invoker: Cannot have 'public' or 'private' in an array of service accounts." + ); + } + + return invoker; +} diff --git a/src/common/onInit.ts b/src/common/onInit.ts new file mode 100644 index 000000000..e1b32ca64 --- /dev/null +++ b/src/common/onInit.ts @@ -0,0 +1,39 @@ +import * as logger from "../logger"; + +let initCallback: (() => unknown) | null = null; +let didInit = false; + +/** + * Registers a callback that should be run when in a production environment + * before executing any functions code. + * Calling this function more than once leads to undefined behavior. + * @param callback initialization callback to be run before any function executes. + */ +export function onInit(callback: () => unknown) { + if (initCallback) { + logger.warn( + "Setting onInit callback more than once. Only the most recent callback will be called" + ); + } + initCallback = callback; + didInit = false; +} + +type Resolved = T extends Promise ? V : T; + +/** @internal */ +export function withInit unknown>(func: T) { + return async (...args: Parameters): Promise>> => { + if (!didInit) { + if (initCallback) { + await initCallback(); + } + didInit = true; + } + + // Note: This cast is actually inaccurate because it may be a promise, but + // it doesn't actually matter because the async function will promisify + // non-promises and forward promises. 
+ return func(...args) as Resolved>; + }; +} diff --git a/src/common/options.ts b/src/common/options.ts new file mode 100644 index 000000000..229fc1f27 --- /dev/null +++ b/src/common/options.ts @@ -0,0 +1,48 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. +/** + * Special configuration type to reset configuration to platform default. + * + * @alpha + */ +export class ResetValue { + toJSON(): null { + return null; + } + // eslint-disable-next-line @typescript-eslint/no-empty-function + private constructor() {} + public static getInstance() { + return new ResetValue(); + } +} + +/** + * Special configuration value to reset configuration to platform default. + */ +export const RESET_VALUE = ResetValue.getInstance(); + +/** + * @internal + */ +export type ResettableKeys = Required<{ + [K in keyof T as [ResetValue] extends [T[K]] ? K : never]: null; +}>; diff --git a/src/common/params.ts b/src/common/params.ts new file mode 100644 index 000000000..8ff3f30a1 --- /dev/null +++ b/src/common/params.ts @@ -0,0 +1,96 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { Expression } from "../params"; + +/** + * A type that splits literal string S with delimiter D. 
+ * + * For example Split<"a/b/c", "/"> is ['a' | "b" | "c"] + */ +export type Split = + // A non-literal string splits into a string[] + string extends S + ? string[] + : // A literal empty string turns into a zero-tuple + S extends "" + ? [] + : // Split the string; Head may be the empty string + S extends `${D}${infer Tail}` + ? [...Split] + : S extends `${infer Head}${D}${infer Tail}` + ? // Drop types that are exactly string; they'll eat up literal string types + string extends Head + ? [...Split] + : [Head, ...Split] + : // A string without delimiters splits into an array of itself + [S]; + +/** + * A type that ensure that type S is not null or undefined. + */ +export type NullSafe = S extends null + ? never + : S extends undefined + ? never + : S extends string + ? S + : never; + +/** + * A type that extracts parameter name enclosed in bracket as string. + * Ignore wildcard matches + * + * For example, VarName<"{uid}"> is "uid". + * For example, VarName<"{uid=*}"> is "uid". + * For example, VarName<"{uid=**}"> is "uid". + */ +export type VarName = Part extends `{${infer Param}=**}` + ? Param + : Part extends `{${infer Param}=*}` + ? Param + : Part extends `{${infer Param}}` + ? Param + : never; + +/** + * A type that maps all parameter capture groups into keys of a record. + * For example, ParamsOf<"users/{uid}"> is { uid: string } + * ParamsOf<"users/{uid}/logs/{log}"> is { uid: string; log: string } + * ParamsOf<"some/static/data"> is {} + * + * For flexibility reasons, ParamsOf is Record + */ +export type ParamsOf> = + // if we have lost type information, revert back to an untyped dictionary + PathPattern extends Expression + ? Record + : string extends PathPattern + ? Record + : { + // N.B. I'm not sure why PathPattern isn't detected to not be an + // Expression per the check above. Since we have the check above + // The Exclude call should be safe. + [Key in VarName< + Split>>, "/">[number] + >]: string; + }; diff --git a/src/common/providers/database.ts b/src/common/providers/database.ts new file mode 100644 index 000000000..b9f059157 --- /dev/null +++ b/src/common/providers/database.ts @@ -0,0 +1,344 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
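As a quick illustration of how the path-pattern helper types in src/common/params.ts above resolve (a sketch that assumes the full generic signatures of Split, VarName, and ParamsOf):

type LogParams = ParamsOf<"users/{uid}/logs/{log=*}">; // { uid: string; log: string }
type NoParams = ParamsOf<"some/static/data">;          // no required keys
type Untyped = ParamsOf<string>;                       // Record<string, string>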
+ +import { App } from "firebase-admin/app"; +import * as database from "firebase-admin/database"; +import { firebaseConfig } from "../../common/config"; +import { joinPath, pathParts } from "../../common/utilities/path"; + +/** + * Pulled from @firebase/database-types, make sure the interface is updated on dependencies upgrades. + * Represents a child snapshot of a `Reference` that is being iterated over. The key will never be undefined. + */ +interface IteratedDataSnapshot extends DataSnapshot { + key: string; // key of the location of this snapshot. +} + +/** + * Interface representing a Firebase Realtime database data snapshot. + */ +export class DataSnapshot implements database.DataSnapshot { + public instance: string; + + /** @hidden */ + private _ref: database.Reference; + + /** @hidden */ + private _path: string; + + /** @hidden */ + private _data: any; + + /** @hidden */ + private _childPath: string; + + constructor( + data: any, + path?: string, // path is undefined for the database root + private app?: App, + instance?: string + ) { + const config = firebaseConfig(); + if (instance) { + // SDK always supplies instance, but user's unit tests may not + this.instance = instance; + } else if (app) { + this.instance = app.options.databaseURL; + } else if (config.databaseURL) { + this.instance = config.databaseURL; + } else if (process.env.GCLOUD_PROJECT) { + this.instance = "https://" + process.env.GCLOUD_PROJECT + "-default-rtdb.firebaseio.com"; + } + + this._path = path; + this._data = data; + } + + /** + * Returns a [`Reference`](/docs/reference/admin/node/admin.database.Reference) + * to the database location where the triggering write occurred. Has + * full read and write access. + */ + get ref(): database.Reference { + if (!this.app) { + // may be unpopulated in user's unit tests + throw new Error( + "Please supply a Firebase app in the constructor for DataSnapshot" + + " in order to use the .ref method." + ); + } + if (!this._ref) { + let db: database.Database; + if (this.instance) { + db = database.getDatabaseWithUrl(this.instance, this.app); + } else { + db = database.getDatabase(this.app); + } + this._ref = db.ref(this._fullPath()); + } + return this._ref; + } + + /** + * The key (last part of the path) of the location of this `DataSnapshot`. + * + * The last token in a database location is considered its key. For example, + * "ada" is the key for the `/users/ada/` node. Accessing the key on any + * `DataSnapshot` returns the key for the location that generated it. + * However, accessing the key on the root URL of a database returns `null`. + */ + get key(): string | null { + const segments = pathParts(this._fullPath()); + const last = segments[segments.length - 1]; + return !last || last === "" ? null : last; + } + + /** + * Extracts a JavaScript value from a `DataSnapshot`. + * + * Depending on the data in a `DataSnapshot`, the `val()` method may return a + * scalar type (string, number, or boolean), an array, or an object. It may also + * return `null`, indicating that the `DataSnapshot` is empty (contains no + * data). + * + * @return The snapshot's contents as a JavaScript value (Object, + * Array, string, number, boolean, or `null`). + */ + val(): any { + const parts = pathParts(this._childPath); + let source = this._data; + if (source === null) { + return null; + } + if (parts.length) { + for (const part of parts) { + if (typeof source === "undefined" || source === null) { + return null; + } + source = source[part]; + } + } + const node = source ?? 
null; + + return this._checkAndConvertToArray(node); + } + + /** + * Exports the entire contents of the `DataSnapshot` as a JavaScript object. + * + * @return The contents of the `DataSnapshot` as a JavaScript value + * (Object, Array, string, number, boolean, or `null`). + */ + exportVal(): any { + return this.val(); + } + + /** + * Gets the priority value of the data in this `DataSnapshot`. + * + * As an alternative to using priority, applications can order collections by + * ordinary properties. See [Sorting and filtering + * data](/docs/database/web/lists-of-data#sorting_and_filtering_data). + * + * @return The priority value of the data. + */ + getPriority(): string | number | null { + return 0; + } + + /** + * Returns `true` if this `DataSnapshot` contains any data. It is slightly more + * efficient than using `snapshot.val() !== null`. + * + * @return `true` if this `DataSnapshot` contains any data; otherwise, `false`. + */ + exists(): boolean { + const val = this.val(); + if (typeof val === "undefined" || val === null) { + return false; + } + if (typeof val === "object" && Object.keys(val).length === 0) { + return false; + } + return true; + } + + /** + * Gets a `DataSnapshot` for the location at the specified relative path. + * + * The relative path can either be a simple child name (for example, "ada") or + * a deeper slash-separated path (for example, "ada/name/first"). + * + * @param path A relative path from this location to the desired child + * location. + * @return The specified child location. + */ + child(childPath: string): DataSnapshot { + if (!childPath) { + return this; + } + return this._dup(childPath); + } + + /** + * Enumerates the `DataSnapshot`s of the children items. + * + * Because of the way JavaScript objects work, the ordering of data in the + * JavaScript object returned by `val()` is not guaranteed to match the ordering + * on the server nor the ordering of `child_added` events. That is where + * `forEach()` comes in handy. It guarantees the children of a `DataSnapshot` + * can be iterated in their query order. + * + * If no explicit `orderBy*()` method is used, results are returned + * ordered by key (unless priorities are used, in which case, results are + * returned by priority). + * + * @param action A function that is called for each child `DataSnapshot`. + * The callback can return `true` to cancel further enumeration. + * + * @return `true` if enumeration was canceled due to your callback + * returning `true`. + */ + forEach(action: (a: IteratedDataSnapshot) => boolean | void): boolean { + const val = this.val() || {}; + if (typeof val === "object") { + return Object.keys(val).some((key) => action(this.child(key)) === true); + } + return false; + } + + /** + * Returns `true` if the specified child path has (non-`null`) data. + * + * @param path A relative path to the location of a potential child. + * @return `true` if data exists at the specified child path; otherwise, + * `false`. + */ + hasChild(childPath: string): boolean { + return this.child(childPath).exists(); + } + + /** + * Returns whether or not the `DataSnapshot` has any non-`null` child + * properties. + * + * You can use `hasChildren()` to determine if a `DataSnapshot` has any + * children. If it does, you can enumerate them using `forEach()`. If it + * doesn't, then either this snapshot contains a primitive value (which can be + * retrieved with `val()`) or it is empty (in which case, `val()` returns + * `null`). + * + * @return `true` if this snapshot has any children; else `false`. 
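 *
 * Illustrative values (hypothetical snapshots, assuming firebaseConfig() above resolves):
 *   new DataSnapshot({ name: "ada" }).hasChildren()  // true
 *   new DataSnapshot("ada").hasChildren()            // false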
+ */ + hasChildren(): boolean { + const val = this.val(); + return val !== null && typeof val === "object" && Object.keys(val).length > 0; + } + + /** + * Returns the number of child properties of this `DataSnapshot`. + * + * @return Number of child properties of this `DataSnapshot`. + */ + numChildren(): number { + const val = this.val(); + return val !== null && typeof val === "object" ? Object.keys(val).length : 0; + } + + /** + * Returns a JSON-serializable representation of this object. + * + * @return A JSON-serializable representation of this object. + */ + toJSON(): Record { + return this.val(); + } + + /** Recursive function to check if keys are numeric & convert node object to array if they are + * + * @hidden + */ + private _checkAndConvertToArray(node: any): any { + if (node === null || typeof node === "undefined") { + return null; + } + if (typeof node !== "object") { + return node; + } + const obj: any = {}; + let numKeys = 0; + let maxKey = 0; + let allIntegerKeys = true; + for (const key in node) { + if (!node.hasOwnProperty(key)) { + continue; + } + const childNode = node[key]; + const v = this._checkAndConvertToArray(childNode); + if (v === null) { + // Empty child node + continue; + } + obj[key] = v; + numKeys++; + const integerRegExp = /^(0|[1-9]\d*)$/; + if (allIntegerKeys && integerRegExp.test(key)) { + maxKey = Math.max(maxKey, Number(key)); + } else { + allIntegerKeys = false; + } + } + + if (numKeys === 0) { + // Empty node + return null; + } + + if (allIntegerKeys && maxKey < 2 * numKeys) { + // convert to array. + const array: any = []; + for (const key of Object.keys(obj)) { + array[key] = obj[key]; + } + + return array; + } + return obj; + } + + /** @hidden */ + private _dup(childPath?: string): DataSnapshot { + const dup = new DataSnapshot(this._data, undefined, this.app, this.instance); + [dup._path, dup._childPath] = [this._path, this._childPath]; + + if (childPath) { + dup._childPath = joinPath(dup._childPath, childPath); + } + + return dup; + } + + /** @hidden */ + private _fullPath(): string { + return (this._path || "") + "/" + (this._childPath || ""); + } +} diff --git a/src/common/providers/firestore.ts b/src/common/providers/firestore.ts new file mode 100644 index 000000000..8a74b588a --- /dev/null +++ b/src/common/providers/firestore.ts @@ -0,0 +1,132 @@ +// The MIT License (MIT) +// +// Copyright (c) 2023 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
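The array-conversion heuristic above can be sketched with hypothetical data (assuming DataSnapshot from the database provider above and a resolvable Firebase config):

const dense = new DataSnapshot({ "0": "a", "1": "b", "3": "c" });
dense.val();  // ["a", "b", , "c"]  (all integer keys, maxKey 3 < 2 * numKeys 3)

const sparse = new DataSnapshot({ "0": "a", "7": "b" });
sparse.val(); // { "0": "a", "7": "b" }  (maxKey 7 >= 2 * numKeys 2, so it stays an object)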
+ +import * as firestore from "firebase-admin/firestore"; +import * as logger from "../../logger"; +import { getApp } from "../../common/app"; +import { google } from "../../../protos/compiledFirestore"; +import { dateToTimestampProto } from "../../common/utilities/encoder"; + +/** static-complied protobufs */ +const DocumentEventData = google.events.cloud.firestore.v1.DocumentEventData; + +let firestoreInstance: any; + +/** @hidden */ +function _getValueProto(data: any, resource: string, valueFieldName: string) { + const value = data?.[valueFieldName]; + if ( + typeof value === "undefined" || + value === null || + (typeof value === "object" && !Object.keys(value).length) + ) { + // Firestore#snapshot_ takes resource string instead of proto for a non-existent snapshot + return resource; + } + const proto = { + fields: value?.fields || {}, + createTime: dateToTimestampProto(value?.createTime), + updateTime: dateToTimestampProto(value?.updateTime), + name: value?.name || resource, + }; + return proto; +} + +/** @internal */ +export function createSnapshotFromProtobuf(data: Uint8Array, path: string, databaseId: string) { + if (!firestoreInstance) { + firestoreInstance = firestore.getFirestore(getApp(), databaseId); + } + try { + const dataBuffer = Buffer.from(data); + const firestoreDecoded = DocumentEventData.decode(dataBuffer); + + return firestoreInstance.snapshot_(firestoreDecoded.value || path, null, "protobufJS"); + } catch (err: unknown) { + logger.error("Failed to decode protobuf and create a snapshot."); + throw err; + } +} + +/** @internal */ +export function createBeforeSnapshotFromProtobuf( + data: Uint8Array, + path: string, + databaseId: string +) { + if (!firestoreInstance) { + firestoreInstance = firestore.getFirestore(getApp(), databaseId); + } + try { + const dataBuffer = Buffer.from(data); + const firestoreDecoded = DocumentEventData.decode(dataBuffer); + + return firestoreInstance.snapshot_(firestoreDecoded.oldValue || path, null, "protobufJS"); + } catch (err: unknown) { + logger.error("Failed to decode protobuf and create a before snapshot."); + throw err; + } +} + +/** @internal */ +export function createSnapshotFromJson( + data: any, + source: string, + createTime: string | undefined, + updateTime: string | undefined, + databaseId?: string +) { + if (!firestoreInstance) { + firestoreInstance = databaseId + ? firestore.getFirestore(getApp(), databaseId) + : firestore.getFirestore(getApp()); + } + const valueProto = _getValueProto(data, source, "value"); + let timeString = createTime || updateTime; + + if (!timeString) { + logger.warn("Snapshot has no readTime. Using now()"); + timeString = new Date().toISOString(); + } + + const readTime = dateToTimestampProto(timeString); + return firestoreInstance.snapshot_(valueProto, readTime, "json"); +} + +/** @internal */ +export function createBeforeSnapshotFromJson( + data: any, + source: string, + createTime: string | undefined, + updateTime: string | undefined, + databaseId?: string +) { + if (!firestoreInstance) { + firestoreInstance = databaseId + ? 
firestore.getFirestore(getApp(), databaseId) + : firestore.getFirestore(getApp()); + } + + const oldValueProto = _getValueProto(data, source, "oldValue"); + const oldReadTime = dateToTimestampProto(createTime || updateTime); + return firestoreInstance.snapshot_(oldValueProto, oldReadTime, "json"); +} diff --git a/src/common/providers/https.ts b/src/common/providers/https.ts new file mode 100644 index 000000000..e6d69cc5b --- /dev/null +++ b/src/common/providers/https.ts @@ -0,0 +1,981 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import cors from "cors"; +import * as express from "express"; +import { DecodedAppCheckToken } from "firebase-admin/app-check"; + +import * as logger from "../../logger"; + +// TODO(inlined): Decide whether we want to un-version apps or whether we want a +// different strategy +import { getAppCheck } from "firebase-admin/app-check"; +import { DecodedIdToken, getAuth } from "firebase-admin/auth"; +import { getApp } from "../app"; +import { isDebugFeatureEnabled } from "../debug"; +import { TaskContext } from "./tasks"; + +const JWT_REGEX = /^[a-zA-Z0-9\-_=]+?\.[a-zA-Z0-9\-_=]+?\.([a-zA-Z0-9\-_=]+)?$/; + +/** @internal */ +export const CALLABLE_AUTH_HEADER = "x-callable-context-auth"; +/** @internal */ +export const ORIGINAL_AUTH_HEADER = "x-original-auth"; +/** @internal */ +export const DEFAULT_HEARTBEAT_SECONDS = 30; + +/** An express request with the wire format representation of the request body. */ +export interface Request extends express.Request { + /** The wire format representation of the request body. */ + rawBody: Buffer; +} + +/** + * The interface for AppCheck tokens verified in Callable functions + */ +export interface AppCheckData { + /** + * The app ID of a Firebase App attested by the App Check token. + */ + appId: string; + /** + * Decoded App Check token. + */ + token: DecodedAppCheckToken; + /** + * Indicates if the token has been consumed. + * + * @remarks + * `false` value indicates that this is the first time the App Check service has seen this token and marked the + * token as consumed for future use of the token. + * + * `true` value indicates the token has previously been marked as consumed by the App Check service. In this case, + * consider taking extra precautions, such as rejecting the request or requiring additional security checks. 
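 *
 * Illustrative check inside a callable handler (sketch):
 *   if (request.app?.alreadyConsumed) {
 *     throw new HttpsError("permission-denied", "App Check token has already been used.");
 *   }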
+ */ + alreadyConsumed?: boolean; +} + +/** + * The interface for Auth tokens verified in Callable functions + */ +export interface AuthData { + /** The user's uid from the request's ID token. */ + uid: string; + /** The decoded claims of the ID token after verification. */ + token: DecodedIdToken; + /** The raw ID token as parsed from the header. */ + rawToken: string; +} + +// This type is the direct v1 callable interface and is also an interface +// that the v2 API can conform to. This allows us to pass the v2 CallableRequest +// directly to the same helper methods. +/** + * The interface for metadata for the API as passed to the handler. + */ +export interface CallableContext { + /** + * The result of decoding and verifying a Firebase AppCheck token. + */ + app?: AppCheckData; + + /** + * The result of decoding and verifying a Firebase Auth ID token. + */ + auth?: AuthData; + + /** + * An unverified token for a Firebase Instance ID. + */ + instanceIdToken?: string; + + /** + * The raw request handled by the callable. + */ + rawRequest: Request; +} + +// This could be a simple extension of CallableContext, but we're +// avoiding that to avoid muddying the docs and making a v2 type depend +// on a v1 type. +/** + * The request used to call a callable function. + */ +export interface CallableRequest { + /** + * The parameters used by a client when calling this function. + */ + data: T; + + /** + * The result of decoding and verifying a Firebase App Check token. + */ + app?: AppCheckData; + + /** + * The result of decoding and verifying a Firebase Auth ID token. + */ + auth?: AuthData; + + /** + * An unverified token for a Firebase Instance ID. + */ + instanceIdToken?: string; + + /** + * The raw request handled by the callable. + */ + rawRequest: Request; + + /** + * Whether this is a streaming request. + * Code can be optimized by not trying to generate a stream of chunks to + * call `response.sendChunk` if `request.acceptsStreaming` is false. + * It is always safe, however, to call `response.sendChunk` as this will + * noop if `acceptsStreaming` is false. + */ + acceptsStreaming: boolean; +} + +/** + * `CallableProxyResponse` allows streaming response chunks and listening to signals + * triggered in events such as a disconnect. + */ +export interface CallableResponse { + /** + * Writes a chunk of the response body to the client. This method can be called + * multiple times to stream data progressively. + * Returns a promise of whether the data was written. This can be false, for example, + * if the request was not a streaming request. Rejects if there is a network error. + */ + sendChunk: (chunk: T) => Promise; + + /** + * An `AbortSignal` that is triggered when the client disconnects or the + * request is terminated prematurely. + */ + signal: AbortSignal; +} + +/** + * The set of Firebase Functions status codes. The codes are the same at the + * ones exposed by {@link https://github.com/grpc/grpc/blob/master/doc/statuscodes.md | gRPC}. + * + * @remarks + * Possible values: + * + * - `cancelled`: The operation was cancelled (typically by the caller). + * + * - `unknown`: Unknown error or an error from a different error domain. + * + * - `invalid-argument`: Client specified an invalid argument. Note that this + * differs from `failed-precondition`. `invalid-argument` indicates + * arguments that are problematic regardless of the state of the system + * (e.g. an invalid field name). + * + * - `deadline-exceeded`: Deadline expired before operation could complete. 
+ * For operations that change the state of the system, this error may be + * returned even if the operation has completed successfully. For example, + * a successful response from a server could have been delayed long enough + * for the deadline to expire. + * + * - `not-found`: Some requested document was not found. + * + * - `already-exists`: Some document that we attempted to create already + * exists. + * + * - `permission-denied`: The caller does not have permission to execute the + * specified operation. + * + * - `resource-exhausted`: Some resource has been exhausted, perhaps a + * per-user quota, or perhaps the entire file system is out of space. + * + * - `failed-precondition`: Operation was rejected because the system is not + * in a state required for the operation's execution. + * + * - `aborted`: The operation was aborted, typically due to a concurrency + * issue like transaction aborts, etc. + * + * - `out-of-range`: Operation was attempted past the valid range. + * + * - `unimplemented`: Operation is not implemented or not supported/enabled. + * + * - `internal`: Internal errors. Means some invariants expected by + * underlying system has been broken. If you see one of these errors, + * something is very broken. + * + * - `unavailable`: The service is currently unavailable. This is most likely + * a transient condition and may be corrected by retrying with a backoff. + * + * - `data-loss`: Unrecoverable data loss or corruption. + * + * - `unauthenticated`: The request does not have valid authentication + * credentials for the operation. + */ +export type FunctionsErrorCode = + | "ok" + | "cancelled" + | "unknown" + | "invalid-argument" + | "deadline-exceeded" + | "not-found" + | "already-exists" + | "permission-denied" + | "resource-exhausted" + | "failed-precondition" + | "aborted" + | "out-of-range" + | "unimplemented" + | "internal" + | "unavailable" + | "data-loss" + | "unauthenticated"; + +/** @hidden */ +export type CanonicalErrorCodeName = + | "OK" + | "CANCELLED" + | "UNKNOWN" + | "INVALID_ARGUMENT" + | "DEADLINE_EXCEEDED" + | "NOT_FOUND" + | "ALREADY_EXISTS" + | "PERMISSION_DENIED" + | "UNAUTHENTICATED" + | "RESOURCE_EXHAUSTED" + | "FAILED_PRECONDITION" + | "ABORTED" + | "OUT_OF_RANGE" + | "UNIMPLEMENTED" + | "INTERNAL" + | "UNAVAILABLE" + | "DATA_LOSS"; + +/** @hidden */ +interface HttpErrorCode { + canonicalName: CanonicalErrorCodeName; + status: number; +} + +/** + * Standard error codes and HTTP statuses for different ways a request can fail, + * as defined by: + * https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto + * + * This map is used primarily to convert from a client error code string to + * to the HTTP format error code string and status, and make sure it's in the + * supported set. 
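 *
 * For example, the "not-found" code maps to the canonical name NOT_FOUND and
 * HTTP status 404 in the table below.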
+ */ +const errorCodeMap: { [name in FunctionsErrorCode]: HttpErrorCode } = { + ok: { canonicalName: "OK", status: 200 }, + cancelled: { canonicalName: "CANCELLED", status: 499 }, + unknown: { canonicalName: "UNKNOWN", status: 500 }, + "invalid-argument": { canonicalName: "INVALID_ARGUMENT", status: 400 }, + "deadline-exceeded": { canonicalName: "DEADLINE_EXCEEDED", status: 504 }, + "not-found": { canonicalName: "NOT_FOUND", status: 404 }, + "already-exists": { canonicalName: "ALREADY_EXISTS", status: 409 }, + "permission-denied": { canonicalName: "PERMISSION_DENIED", status: 403 }, + unauthenticated: { canonicalName: "UNAUTHENTICATED", status: 401 }, + "resource-exhausted": { canonicalName: "RESOURCE_EXHAUSTED", status: 429 }, + "failed-precondition": { canonicalName: "FAILED_PRECONDITION", status: 400 }, + aborted: { canonicalName: "ABORTED", status: 409 }, + "out-of-range": { canonicalName: "OUT_OF_RANGE", status: 400 }, + unimplemented: { canonicalName: "UNIMPLEMENTED", status: 501 }, + internal: { canonicalName: "INTERNAL", status: 500 }, + unavailable: { canonicalName: "UNAVAILABLE", status: 503 }, + "data-loss": { canonicalName: "DATA_LOSS", status: 500 }, +}; + +/** @hidden */ +interface HttpErrorWireFormat { + details?: unknown; + message: string; + status: CanonicalErrorCodeName; +} + +/** + * An explicit error that can be thrown from a handler to send an error to the + * client that called the function. + */ +export class HttpsError extends Error { + /** + * A standard error code that will be returned to the client. This also + * determines the HTTP status code of the response, as defined in code.proto. + */ + public readonly code: FunctionsErrorCode; + + /** + * Extra data to be converted to JSON and included in the error response. + */ + public readonly details: unknown; + + /** + * A wire format representation of a provided error code. + * + * @hidden + */ + public readonly httpErrorCode: HttpErrorCode; + + constructor(code: FunctionsErrorCode, message: string, details?: unknown) { + super(message); + + // A sanity check for non-TypeScript consumers. + if (code in errorCodeMap === false) { + throw new Error(`Unknown error code: ${code}.`); + } + + this.code = code; + this.details = details; + this.httpErrorCode = errorCodeMap[code]; + } + + /** + * Returns a JSON-serializable representation of this object. + */ + public toJSON(): HttpErrorWireFormat { + const { + details, + httpErrorCode: { canonicalName: status }, + message, + } = this; + + return { + ...(details === undefined ? {} : { details }), + message, + status, + }; + } +} + +/** @hidden */ +// The allowed interface for an HTTP request to a Callable function. +interface HttpRequest extends Request { + body: { + data: any; + }; +} + +/** @hidden */ +// The format for an HTTP body response from a Callable function. +interface HttpResponseBody { + result?: any; + error?: HttpsError; +} + +/** @hidden */ +// Returns true if req is a properly formatted callable request. +export function isValidRequest(req: Request): req is HttpRequest { + // The body must not be empty. + if (!req.body) { + logger.warn("Request is missing body."); + return false; + } + + // Make sure it's a POST. + if (req.method !== "POST") { + logger.warn("Request has invalid method.", req.method); + return false; + } + + // Check that the Content-Type is JSON. + let contentType = (req.header("Content-Type") || "").toLowerCase(); + // If it has a charset, just ignore it for now. 
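  // e.g. "application/json; charset=utf-8" is treated as "application/json".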
+ const semiColon = contentType.indexOf(";"); + if (semiColon >= 0) { + contentType = contentType.slice(0, semiColon).trim(); + } + if (contentType !== "application/json") { + logger.warn("Request has incorrect Content-Type.", contentType); + return false; + } + + // The body must have data. + if (typeof req.body.data === "undefined") { + logger.warn("Request body is missing data.", req.body); + return false; + } + + // TODO(klimt): Allow only specific http headers. + + // Verify that the body does not have any extra fields. + const extraKeys = Object.keys(req.body).filter((field) => field !== "data"); + if (extraKeys.length !== 0) { + logger.warn("Request body has extra fields: ", extraKeys.join(", ")); + return false; + } + return true; +} + +/** @hidden */ +const LONG_TYPE = "type.googleapis.com/google.protobuf.Int64Value"; +/** @hidden */ +const UNSIGNED_LONG_TYPE = "type.googleapis.com/google.protobuf.UInt64Value"; + +/** + * Encodes arbitrary data in our special format for JSON. + * This is exposed only for testing. + */ +/** @hidden */ +export function encode(data: any): any { + if (data === null || typeof data === "undefined") { + return null; + } + if (data instanceof Number) { + data = data.valueOf(); + } + if (Number.isFinite(data)) { + // Any number in JS is safe to put directly in JSON and parse as a double + // without any loss of precision. + return data; + } + if (typeof data === "boolean") { + return data; + } + if (typeof data === "string") { + return data; + } + if (Array.isArray(data)) { + return data.map(encode); + } + if (typeof data === "object" || typeof data === "function") { + // Sadly we don't have Object.fromEntries in Node 10, so we can't use a single + // list comprehension + const obj: Record = {}; + for (const [k, v] of Object.entries(data)) { + obj[k] = encode(v); + } + return obj; + } + // If we got this far, the data is not encodable. + logger.error("Data cannot be encoded in JSON.", data); + throw new Error(`Data cannot be encoded in JSON: ${data}`); +} + +/** + * Decodes our special format for JSON into native types. + * This is exposed only for testing. + */ +/** @hidden */ +export function decode(data: any): any { + if (data === null) { + return data; + } + if (data["@type"]) { + switch (data["@type"]) { + case LONG_TYPE: + // Fall through and handle this the same as unsigned. + case UNSIGNED_LONG_TYPE: { + // Technically, this could work return a valid number for malformed + // data if there was a number followed by garbage. But it's just not + // worth all the extra code to detect that case. + const value = parseFloat(data.value); + if (isNaN(value)) { + logger.error("Data cannot be decoded from JSON.", data); + throw new Error(`Data cannot be decoded from JSON: ${data}`); + } + return value; + } + default: { + logger.error("Data cannot be decoded from JSON.", data); + throw new Error(`Data cannot be decoded from JSON: ${data}`); + } + } + } + if (Array.isArray(data)) { + return data.map(decode); + } + if (typeof data === "object") { + const obj: Record = {}; + for (const [k, v] of Object.entries(data)) { + obj[k] = decode(v); + } + return obj; + } + // Anything else is safe to return. + return data; +} + +/** + * Be careful when changing token status values. + * + * Users are encouraged to setup log-based metric based on these values, and + * changing their values may cause their metrics to break. 
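 *
 * For example, a log-based metric might filter on the verifications.auth field
 * of the "callable-request-verification" entry logged by checkTokens() below
 * (illustrative; the exact field path depends on the logging configuration).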
+ * + */ +/** @hidden */ +type TokenStatus = "MISSING" | "VALID" | "INVALID"; + +/** @hidden */ +interface CallableTokenStatus { + app: TokenStatus; + auth: TokenStatus; +} + +/** @internal */ +export function unsafeDecodeToken(token: string): unknown { + if (!JWT_REGEX.test(token)) { + return {}; + } + const components = token.split(".").map((s) => Buffer.from(s, "base64").toString()); + let payload = components[1]; + if (typeof payload === "string") { + try { + const obj = JSON.parse(payload); + if (typeof obj === "object") { + payload = obj; + } + } catch (_e) { + // ignore error + } + } + return payload; +} + +/** + * Decode, but not verify, a Auth ID token. + * + * Do not use in production. Token should always be verified using the Admin SDK. + * + * This is exposed only for testing. + */ +/** @internal */ +export function unsafeDecodeIdToken(token: string): DecodedIdToken { + const decoded = unsafeDecodeToken(token) as DecodedIdToken; + decoded.uid = decoded.sub; + return decoded; +} + +/** + * Decode, but not verify, an App Check token. + * + * Do not use in production. Token should always be verified using the Admin SDK. + * + * This is exposed only for testing. + */ +/** @internal */ +export function unsafeDecodeAppCheckToken(token: string): DecodedAppCheckToken { + const decoded = unsafeDecodeToken(token) as DecodedAppCheckToken; + decoded.app_id = decoded.sub; + return decoded; +} + +/** + * Check and verify tokens included in the requests. Once verified, tokens + * are injected into the callable context. + * + * @param {Request} req - Request sent to the Callable function. + * @param {CallableContext} ctx - Context to be sent to callable function handler. + * @returns {CallableTokenStatus} Status of the token verifications. + */ +/** @internal */ +async function checkTokens( + req: Request, + ctx: CallableContext, + options: CallableOptions +): Promise { + const verifications: CallableTokenStatus = { + app: "INVALID", + auth: "INVALID", + }; + + [verifications.auth, verifications.app] = await Promise.all([ + checkAuthToken(req, ctx), + checkAppCheckToken(req, ctx, options), + ]); + + const logPayload = { + verifications, + "logging.googleapis.com/labels": { + "firebase-log-type": "callable-request-verification", + }, + }; + + const errs = []; + if (verifications.app === "INVALID") { + errs.push("AppCheck token was rejected."); + } + if (verifications.auth === "INVALID") { + errs.push("Auth token was rejected."); + } + + if (errs.length === 0) { + logger.debug("Callable request verification passed", logPayload); + } else { + logger.warn(`Callable request verification failed: ${errs.join(" ")}`, logPayload); + } + + return verifications; +} + +/** @interanl */ +export async function checkAuthToken( + req: Request, + ctx: CallableContext | TaskContext +): Promise { + const authorization = req.header("Authorization"); + if (!authorization) { + return "MISSING"; + } + const match = authorization.match(/^Bearer (.*)$/i); + if (!match) { + return "INVALID"; + } + const idToken = match[1]; + try { + let authToken: DecodedIdToken; + if (isDebugFeatureEnabled("skipTokenVerification")) { + authToken = unsafeDecodeIdToken(idToken); + } else { + authToken = await getAuth(getApp()).verifyIdToken(idToken); + } + ctx.auth = { + uid: authToken.uid, + token: authToken, + rawToken: idToken, + }; + return "VALID"; + } catch (err) { + logger.warn("Failed to validate auth token.", err); + return "INVALID"; + } +} + +/** @internal */ +async function checkAppCheckToken( + req: Request, + ctx: 
CallableContext, + options: CallableOptions +): Promise { + const appCheckToken = req.header("X-Firebase-AppCheck"); + if (!appCheckToken) { + return "MISSING"; + } + try { + let appCheckData: AppCheckData; + if (isDebugFeatureEnabled("skipTokenVerification")) { + const decodedToken = unsafeDecodeAppCheckToken(appCheckToken); + appCheckData = { appId: decodedToken.app_id, token: decodedToken }; + if (options.consumeAppCheckToken) { + appCheckData.alreadyConsumed = false; + } + } else { + const appCheck = getAppCheck(getApp()); + if (options.consumeAppCheckToken) { + if (appCheck.verifyToken?.length === 1) { + const errorMsg = + "Unsupported version of the Admin SDK." + + " App Check token will not be consumed." + + " Please upgrade the firebase-admin to the latest version."; + logger.error(errorMsg); + throw new HttpsError("internal", "Internal Error"); + } + appCheckData = await getAppCheck(getApp()).verifyToken(appCheckToken, { consume: true }); + } else { + appCheckData = await getAppCheck(getApp()).verifyToken(appCheckToken); + } + } + ctx.app = appCheckData; + return "VALID"; + } catch (err) { + logger.warn("Failed to validate AppCheck token.", err); + if (err instanceof HttpsError) { + throw err; + } + return "INVALID"; + } +} + +type v1CallableHandler = (data: any, context: CallableContext) => any | Promise; +type v2CallableHandler = ( + request: CallableRequest, + response?: CallableResponse +) => Res; + +/** @internal **/ +export interface CallableOptions { + cors: cors.CorsOptions; + enforceAppCheck?: boolean; + consumeAppCheckToken?: boolean; + /* @deprecated */ + authPolicy?: (token: AuthData | null, data: T) => boolean | Promise; + /** + * Time in seconds between sending heartbeat messages to keep the connection + * alive. Set to `null` to disable heartbeats. + * + * Defaults to 30 seconds. + */ + heartbeatSeconds?: number | null; +} + +/** @internal */ +export function onCallHandler( + options: CallableOptions, + handler: v1CallableHandler | v2CallableHandler, + version: "gcfv1" | "gcfv2" +): (req: Request, res: express.Response) => Promise { + const wrapped = wrapOnCallHandler(options, handler, version); + return (req: Request, res: express.Response) => { + return new Promise((resolve) => { + res.on("finish", resolve); + cors(options.cors)(req, res, () => { + resolve(wrapped(req, res)); + }); + }); + }; +} + +function encodeSSE(data: unknown): string { + return `data: ${JSON.stringify(data)}\n\n`; +} + +/** @internal */ +function wrapOnCallHandler( + options: CallableOptions, + handler: v1CallableHandler | v2CallableHandler, + version: "gcfv1" | "gcfv2" +): (req: Request, res: express.Response) => Promise { + return async (req: Request, res: express.Response): Promise => { + const abortController = new AbortController(); + let heartbeatInterval: NodeJS.Timeout | null = null; + + const heartbeatSeconds = + options.heartbeatSeconds === undefined ? 
DEFAULT_HEARTBEAT_SECONDS : options.heartbeatSeconds; + + const clearScheduledHeartbeat = () => { + if (heartbeatInterval) { + clearTimeout(heartbeatInterval); + heartbeatInterval = null; + } + }; + + const scheduleHeartbeat = () => { + clearScheduledHeartbeat(); + if (!abortController.signal.aborted) { + heartbeatInterval = setTimeout(() => { + if (!abortController.signal.aborted) { + res.write(": ping\n\n"); + scheduleHeartbeat(); + } + }, heartbeatSeconds * 1000); + } + }; + + res.on("close", () => { + clearScheduledHeartbeat(); + abortController.abort(); + }); + + try { + if (!isValidRequest(req)) { + logger.error("Invalid request, unable to process."); + throw new HttpsError("invalid-argument", "Bad Request"); + } + + const context: CallableContext = { rawRequest: req }; + + // TODO(colerogers): yank this when we release a breaking change of the CLI that removes + // our monkey-patching code referenced below and increases the minimum supported SDK version. + // + // Note: This code is needed to fix v1 callable functions in the emulator with a monorepo setup. + // The original monkey-patched code lived in the functionsEmulatorRuntime + // (link: https://github.com/firebase/firebase-tools/blob/accea7abda3cc9fa6bb91368e4895faf95281c60/src/emulator/functionsEmulatorRuntime.ts#L480) + // and was not compatible with how monorepos separate out packages (see https://github.com/firebase/firebase-tools/issues/5210). + if (isDebugFeatureEnabled("skipTokenVerification") && version === "gcfv1") { + const authContext = context.rawRequest.header(CALLABLE_AUTH_HEADER); + if (authContext) { + logger.debug("Callable functions auth override", { + key: CALLABLE_AUTH_HEADER, + value: authContext, + }); + context.auth = JSON.parse(decodeURIComponent(authContext)); + delete context.rawRequest.headers[CALLABLE_AUTH_HEADER]; + } + + const originalAuth = context.rawRequest.header(ORIGINAL_AUTH_HEADER); + if (originalAuth) { + context.rawRequest.headers["authorization"] = originalAuth; + delete context.rawRequest.headers[ORIGINAL_AUTH_HEADER]; + } + } + + const tokenStatus = await checkTokens(req, context, options); + if (tokenStatus.auth === "INVALID") { + throw new HttpsError("unauthenticated", "Unauthenticated"); + } + if (tokenStatus.app === "INVALID") { + if (options.enforceAppCheck) { + throw new HttpsError("unauthenticated", "Unauthenticated"); + } else { + logger.warn( + "Allowing request with invalid AppCheck token because enforcement is disabled" + ); + } + } + if (tokenStatus.app === "MISSING" && options.enforceAppCheck) { + throw new HttpsError("unauthenticated", "Unauthenticated"); + } + + const instanceId = req.header("Firebase-Instance-ID-Token"); + if (instanceId) { + // Validating the token requires an http request, so we don't do it. + // If the user wants to use it for something, it will be validated then. + // Currently, the only real use case for this token is for sending + // pushes with FCM. In that case, the FCM APIs will validate the token. + context.instanceIdToken = req.header("Firebase-Instance-ID-Token"); + } + + const acceptsStreaming = req.header("accept") === "text/event-stream"; + + if (acceptsStreaming && version === "gcfv1") { + // streaming responses are not supported in v1 callable + throw new HttpsError("invalid-argument", "Unsupported Accept header 'text/event-stream'"); + } + + const data: Req = decode(req.body.data); + if (options.authPolicy) { + const authorized = await options.authPolicy(context.auth ?? 
null, data); + if (!authorized) { + throw new HttpsError("permission-denied", "Permission Denied"); + } + } + let result: Res; + if (version === "gcfv1") { + result = await (handler as v1CallableHandler)(data, context); + } else { + const arg: CallableRequest = { + ...context, + data, + acceptsStreaming, + }; + + const responseProxy: CallableResponse = { + sendChunk(chunk: Stream): Promise { + // if client doesn't accept sse-protocol, response.write() is no-op. + if (!acceptsStreaming) { + return Promise.resolve(false); + } + // if connection is already closed, response.write() is no-op. + if (abortController.signal.aborted) { + return Promise.resolve(false); + } + const formattedData = encodeSSE({ message: chunk }); + let resolve: (wrote: boolean) => void; + let reject: (err: Error) => void; + const p = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + const wrote = res.write(formattedData, (error) => { + if (error) { + reject(error); + return; + } + resolve(wrote); + }); + + // Reset heartbeat timer after successful write + if (wrote && heartbeatInterval !== null && heartbeatSeconds > 0) { + scheduleHeartbeat(); + } + + return p; + }, + signal: abortController.signal, + }; + if (acceptsStreaming) { + // SSE always responds with 200 + res.status(200); + + if (heartbeatSeconds !== null && heartbeatSeconds > 0) { + scheduleHeartbeat(); + } + } + // For some reason the type system isn't picking up that the handler + // is a one argument function. + result = await (handler as any)(arg, responseProxy); + clearScheduledHeartbeat(); + } + if (!abortController.signal.aborted) { + // Encode the result as JSON to preserve types like Dates. + result = encode(result); + // If there was some result, encode it in the body. + const responseBody: HttpResponseBody = { result }; + if (acceptsStreaming) { + res.write(encodeSSE(responseBody)); + res.end(); + } else { + res.status(200).send(responseBody); + } + } else { + res.end(); + } + } catch (err) { + if (!abortController.signal.aborted) { + let httpErr = err; + if (!(err instanceof HttpsError)) { + // This doesn't count as an 'explicit' error. + logger.error("Unhandled error", err); + httpErr = new HttpsError("internal", "INTERNAL"); + } + const { status } = httpErr.httpErrorCode; + const body = { error: httpErr.toJSON() }; + if (version === "gcfv2" && req.header("accept") === "text/event-stream") { + res.write(encodeSSE(body)); + res.end(); + } else { + res.status(status).send(body); + } + } else { + res.end(); + } + } finally { + clearScheduledHeartbeat(); + } + }; +} + +/** + * Wraps an HTTP handler with a safety net for unhandled errors. + * + * This wrapper catches both synchronous errors and rejected Promises from `async` handlers. + * Without this, an unhandled error in an `async` handler would cause the request to hang + * until the platform timeout, as Express (v4) does not await handlers. + * + * It logs the error and returns a 500 Internal Server Error to the client if the response + * headers have not yet been sent. 
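 *
 * Illustrative usage (sketch; the inner handler and lookupRecord are hypothetical):
 *   const safeHandler = withErrorHandler(async (req, res) => {
 *     res.send(await lookupRecord(req.params.id));
 *   });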
+ * + * @internal + */ +export function withErrorHandler( + handler: (req: Request, res: express.Response) => void | Promise +): (req: Request, res: express.Response) => Promise { + return async (req: Request, res: express.Response) => { + try { + await handler(req, res); + } catch (err) { + logger.error("Unhandled error", err); + if (!res.headersSent) { + res.status(500).send("Internal Server Error"); + } + } + }; +} diff --git a/src/common/providers/identity.ts b/src/common/providers/identity.ts new file mode 100644 index 000000000..f2a8a3949 --- /dev/null +++ b/src/common/providers/identity.ts @@ -0,0 +1,944 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as express from "express"; +import * as auth from "firebase-admin/auth"; +import * as logger from "../../logger"; +import { EventContext } from "../../v1/cloud-functions"; +import { getApp } from "../app"; +import { isDebugFeatureEnabled } from "../debug"; +import { HttpsError, unsafeDecodeToken } from "./https"; + +export { HttpsError }; + +const DISALLOWED_CUSTOM_CLAIMS = [ + "acr", + "amr", + "at_hash", + "aud", + "auth_time", + "azp", + "cnf", + "c_hash", + "exp", + "iat", + "iss", + "jti", + "nbf", + "nonce", + "firebase", +]; + +const CLAIMS_MAX_PAYLOAD_SIZE = 1000; + +/** + * Shorthand auth blocking events from GCIP. + * @hidden + * @alpha + */ +export type AuthBlockingEventType = + | "beforeCreate" + | "beforeSignIn" + | "beforeSendEmail" + | "beforeSendSms"; + +const EVENT_MAPPING: Record = { + beforeCreate: "providers/cloud.auth/eventTypes/user.beforeCreate", + beforeSignIn: "providers/cloud.auth/eventTypes/user.beforeSignIn", + beforeSendEmail: "providers/cloud.auth/eventTypes/user.beforeSendEmail", + beforeSendSms: "providers/cloud.auth/eventTypes/user.beforeSendSms", +}; + +/** + * The `UserRecord` passed to Cloud Functions is the same + * {@link https://firebase.google.com/docs/reference/admin/node/firebase-admin.auth.userrecord | UserRecord} + * that is returned by the Firebase Admin SDK. + */ +export type UserRecord = auth.UserRecord; + +/** + * `UserInfo` that is part of the `UserRecord`. + */ +export type UserInfo = auth.UserInfo; + +/** + * Helper class to create the user metadata in a `UserRecord` object. 
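 *
 * Illustrative construction (timestamps are hypothetical):
 *   const meta = new UserRecordMetadata("2023-01-01T00:00:00Z", "2023-06-01T00:00:00Z");
 *   meta.toJSON(); // { creationTime: "2023-01-01T00:00:00Z", lastSignInTime: "2023-06-01T00:00:00Z" }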
+ */ +export class UserRecordMetadata implements auth.UserMetadata { + constructor(public creationTime: string, public lastSignInTime: string) {} + + /** Returns a plain JavaScript object with the properties of UserRecordMetadata. */ + toJSON(): AuthUserMetadata { + return { + creationTime: this.creationTime, + lastSignInTime: this.lastSignInTime, + }; + } +} + +/** + * Helper function that creates a `UserRecord` class from data sent over the wire. + * @param wireData data sent over the wire + * @returns an instance of `UserRecord` with correct toJSON functions + */ +export function userRecordConstructor(wireData: Record): UserRecord { + // Falsey values from the wire format proto get lost when converted to JSON, this adds them back. + const falseyValues: any = { + email: null, + emailVerified: false, + displayName: null, + photoURL: null, + phoneNumber: null, + disabled: false, + providerData: [], + customClaims: {}, + passwordSalt: null, + passwordHash: null, + tokensValidAfterTime: null, + }; + const record = { ...falseyValues, ...wireData }; + + const meta = record.metadata; + if (meta) { + record.metadata = new UserRecordMetadata( + meta.createdAt || meta.creationTime, + meta.lastSignedInAt || meta.lastSignInTime + ); + } else { + record.metadata = new UserRecordMetadata(null, null); + } + record.toJSON = () => { + const { + uid, + email, + emailVerified, + displayName, + photoURL, + phoneNumber, + disabled, + passwordHash, + passwordSalt, + tokensValidAfterTime, + } = record; + const json: Record = { + uid, + email, + emailVerified, + displayName, + photoURL, + phoneNumber, + disabled, + passwordHash, + passwordSalt, + tokensValidAfterTime, + }; + json.metadata = record.metadata.toJSON(); + json.customClaims = JSON.parse(JSON.stringify(record.customClaims)); + json.providerData = record.providerData.map((entry) => { + const newEntry = { ...entry }; + newEntry.toJSON = () => entry; + return newEntry; + }); + return json; + }; + return record as UserRecord; +} + +/** + * User info that is part of the `AuthUserRecord`. + */ +export interface AuthUserInfo { + /** + * The user identifier for the linked provider. + */ + uid: string; + /** + * The display name for the linked provider. + */ + displayName: string; + /** + * The email for the linked provider. + */ + email: string; + /** + * The photo URL for the linked provider. + */ + photoURL: string; + /** + * The linked provider ID (for example, "google.com" for the Google provider). + */ + providerId: string; + /** + * The phone number for the linked provider. + */ + phoneNumber: string; +} + +/** + * Additional metadata about the user. + */ +export interface AuthUserMetadata { + /** + * The date the user was created, formatted as a UTC string. + */ + creationTime: string; + /** + * The date the user last signed in, formatted as a UTC string. + */ + lastSignInTime: string; +} + +/** + * Interface representing the common properties of a user-enrolled second factor. + */ +export interface AuthMultiFactorInfo { + /** + * The ID of the enrolled second factor. This ID is unique to the user. + */ + uid: string; + /** + * The optional display name of the enrolled second factor. + */ + displayName?: string; + /** + * The type identifier of the second factor. For SMS second factors, this is `phone`. + */ + factorId: string; + /** + * The optional date the second factor was enrolled, formatted as a UTC string. + */ + enrollmentTime?: string; + /** + * The phone number associated with a phone second factor. 
+ */ + phoneNumber?: string; +} + +/** + * The multi-factor related properties for the current user, if available. + */ +export interface AuthMultiFactorSettings { + /** + * List of second factors enrolled with the current user. + */ + enrolledFactors: AuthMultiFactorInfo[]; +} + +/** + * The `UserRecord` passed to auth blocking functions from the identity platform. + */ +export interface AuthUserRecord { + /** + * The user's `uid`. + */ + uid: string; + /** + * The user's primary email, if set. + */ + email?: string; + /** + * Whether or not the user's primary email is verified. + */ + emailVerified: boolean; + /** + * The user's display name. + */ + displayName?: string; + /** + * The user's photo URL. + */ + photoURL?: string; + /** + * The user's primary phone number, if set. + */ + phoneNumber?: string; + /** + * Whether or not the user is disabled: `true` for disabled; `false` for + * enabled. + */ + disabled: boolean; + /** + * Additional metadata about the user. + */ + metadata: AuthUserMetadata; + /** + * An array of providers (for example, Google, Facebook) linked to the user. + */ + providerData: AuthUserInfo[]; + /** + * The user's hashed password (base64-encoded). + */ + passwordHash?: string; + /** + * The user's password salt (base64-encoded). + */ + passwordSalt?: string; + /** + * The user's custom claims object if available, typically used to define + * user roles and propagated to an authenticated user's ID token. + */ + customClaims?: Record; + /** + * The ID of the tenant the user belongs to, if available. + */ + tenantId?: string | null; + /** + * The date the user's tokens are valid after, formatted as a UTC string. + */ + tokensValidAfterTime?: string; + /** + * The multi-factor related properties for the current user, if available. + */ + multiFactor?: AuthMultiFactorSettings; +} + +/** The additional user info component of the auth event context */ +export interface AdditionalUserInfo { + providerId?: string; + profile?: any; + username?: string; + isNewUser: boolean; + recaptchaScore?: number; + email?: string; + phoneNumber?: string; +} + +/** The credential component of the auth event context */ +export interface Credential { + claims?: { [key: string]: any }; + idToken?: string; + accessToken?: string; + refreshToken?: string; + expirationTime?: string; + secret?: string; + providerId: string; + signInMethod: string; +} + +/** + * Possible types of emails as described by the GCIP backend, which can be: + * - A sign-in email + * - A password reset email + */ +export type EmailType = "EMAIL_SIGN_IN" | "PASSWORD_RESET"; + +/** + * The type of SMS message, which can be: + * - A sign-in or sign up SMS message + * - A multi-factor sign-in SMS message + * - A multi-factor enrollment SMS message + */ +export type SmsType = "SIGN_IN_OR_SIGN_UP" | "MULTI_FACTOR_SIGN_IN" | "MULTI_FACTOR_ENROLLMENT"; + +/** Defines the auth event context for blocking events */ +export interface AuthEventContext extends EventContext { + locale?: string; + ipAddress: string; + userAgent: string; + additionalUserInfo?: AdditionalUserInfo; + credential?: Credential; + emailType?: EmailType; + smsType?: SmsType; +} + +/** Defines the auth event for 2nd gen blocking events */ +export interface AuthBlockingEvent extends AuthEventContext { + data?: AuthUserRecord; // will be undefined for beforeEmailSent and beforeSmsSent event types +} + +/** The reCAPTCHA action options. 
*/ +export type RecaptchaActionOptions = "ALLOW" | "BLOCK"; + +/** The handler response type for `beforeEmailSent` blocking events */ +export interface BeforeEmailResponse { + recaptchaActionOverride?: RecaptchaActionOptions; +} + +/** The handler response type for `beforeSmsSent` blocking events */ +export interface BeforeSmsResponse { + recaptchaActionOverride?: RecaptchaActionOptions; +} + +/** The handler response type for `beforeCreate` blocking events */ +export interface BeforeCreateResponse { + displayName?: string; + disabled?: boolean; + emailVerified?: boolean; + photoURL?: string; + customClaims?: object; + recaptchaActionOverride?: RecaptchaActionOptions; +} + +/** The handler response type for `beforeSignIn` blocking events */ +export interface BeforeSignInResponse extends BeforeCreateResponse { + sessionClaims?: object; +} + +interface DecodedPayloadUserRecordMetadata { + creation_time?: number; + last_sign_in_time?: number; +} + +interface DecodedPayloadUserRecordUserInfo { + uid: string; + display_name?: string; + email?: string; + photo_url?: string; + phone_number?: string; + provider_id: string; +} + +/** @internal */ +export interface DecodedPayloadMfaInfo { + uid: string; + display_name?: string; + phone_number?: string; + enrollment_time?: string; + factor_id?: string; +} + +interface DecodedPayloadUserRecordEnrolledFactors { + enrolled_factors?: DecodedPayloadMfaInfo[]; +} + +/** @internal */ +export interface DecodedPayloadUserRecord { + uid: string; + email?: string; + email_verified?: boolean; + phone_number?: string; + display_name?: string; + photo_url?: string; + disabled?: boolean; + metadata?: DecodedPayloadUserRecordMetadata; + password_hash?: string; + password_salt?: string; + provider_data?: DecodedPayloadUserRecordUserInfo[]; + multi_factor?: DecodedPayloadUserRecordEnrolledFactors; + custom_claims?: any; + tokens_valid_after_time?: number; + tenant_id?: string; + [key: string]: any; +} + +/** @internal */ +export interface DecodedPayload { + aud: string; + exp: number; + iat: number; + iss: string; + sub?: string; + event_id: string; + event_type: string; + ip_address: string; + user_agent?: string; + locale?: string; + sign_in_method?: string; + user_record?: DecodedPayloadUserRecord; + tenant_id?: string; + raw_user_info?: string; + sign_in_attributes?: { + [key: string]: any; + }; + oauth_id_token?: string; + oauth_access_token?: string; + oauth_refresh_token?: string; + oauth_token_secret?: string; + oauth_expires_in?: number; + recaptcha_score?: number; + email?: string; + email_type?: string; + phone_number?: string; + sms_type?: string; + [key: string]: any; +} + +/** + * Internal definition to include all the fields that can be sent as + * a response from the blocking function to the backend. + * This is added mainly to have a type definition for 'generateResponsePayload' + @internal */ +export interface ResponsePayload { + userRecord?: UserRecordResponsePayload; + recaptchaActionOverride?: RecaptchaActionOptions; +} + +/** @internal */ +export interface UserRecordResponsePayload + extends Omit { + updateMask?: string; +} + +export type MaybeAsync = T | Promise; + +// N.B. As we add support for new auth blocking functions, some auth blocking event handlers +// will not receive a user record object. However, we can't make the user record parameter +// optional because it is listed before the required context parameter. 
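// For illustration only, a minimal sketch of a beforeSignIn-style handler written against the
// types above (the score threshold and claim names are made up). It assumes the common case
// where a user record is available; beforeSendEmail/beforeSendSms handlers receive only the
// event context, which is why HandlerV1 below types its first parameter as a union.
const exampleBeforeSignIn = (
  user: AuthUserRecord,
  context: AuthEventContext
): BeforeSignInResponse => {
  // Hypothetical policy: block sign-ins with a low reCAPTCHA score, otherwise stamp session
  // claims with the caller's IP address and uid.
  const score = context.additionalUserInfo?.recaptchaScore;
  if (score !== undefined && score < 0.3) {
    return { recaptchaActionOverride: "BLOCK" };
  }
  return { sessionClaims: { signInIp: context.ipAddress, signInUid: user.uid } };
};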
+export type HandlerV1 = ( + userOrContext: AuthUserRecord | AuthEventContext, + context?: AuthEventContext +) => MaybeAsync< + BeforeCreateResponse | BeforeSignInResponse | BeforeEmailResponse | BeforeSmsResponse | void +>; + +export type HandlerV2 = ( + event: AuthBlockingEvent +) => MaybeAsync< + BeforeCreateResponse | BeforeSignInResponse | BeforeEmailResponse | BeforeSmsResponse | void +>; + +export type AuthBlockingEventHandler = (HandlerV1 | HandlerV2) & { + // Specify the GCF gen of the trigger that the auth blocking event handler was written for + platform: "gcfv1" | "gcfv2"; +}; + +/** + * Checks for a valid identity platform web request, otherwise throws an HttpsError. + * @internal + */ +export function isValidRequest(req: express.Request): boolean { + if (req.method !== "POST") { + logger.warn(`Request has invalid method "${req.method}".`); + return false; + } + + const contentType: string = (req.header("Content-Type") || "").toLowerCase(); + if (!contentType.includes("application/json")) { + logger.warn("Request has invalid header Content-Type."); + return false; + } + + if (!req.body?.data?.jwt) { + logger.warn("Request has an invalid body."); + return false; + } + return true; +} + +/** + * Decode, but not verify, an Auth Blocking token. + * + * Do not use in production. Token should always be verified using the Admin SDK. + * + * This is exposed only for testing. + */ +function unsafeDecodeAuthBlockingToken(token: string): DecodedPayload { + const decoded = unsafeDecodeToken(token) as DecodedPayload; + decoded.uid = decoded.sub; + return decoded; +} + +/** + * Helper function to parse the decoded metadata object into a `UserMetaData` object + * @internal + */ +export function parseMetadata(metadata: DecodedPayloadUserRecordMetadata): AuthUserMetadata { + const creationTime = metadata?.creation_time + ? new Date(metadata.creation_time).toUTCString() + : null; + const lastSignInTime = metadata?.last_sign_in_time + ? new Date(metadata.last_sign_in_time).toUTCString() + : null; + return { + creationTime, + lastSignInTime, + }; +} + +/** + * Helper function to parse the decoded user info array into an `AuthUserInfo` array. + * @internal + */ +export function parseProviderData( + providerData: DecodedPayloadUserRecordUserInfo[] +): AuthUserInfo[] { + const providers: AuthUserInfo[] = []; + for (const provider of providerData) { + providers.push({ + uid: provider.uid, + displayName: provider.display_name, + email: provider.email, + photoURL: provider.photo_url, + providerId: provider.provider_id, + phoneNumber: provider.phone_number, + }); + } + return providers; +} + +/** + * Helper function to parse the date into a UTC string. 
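+ * For example, an input of 1672531200 (epoch seconds) becomes "Sun, 01 Jan 2023 00:00:00 GMT".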
+ * @internal + */ +export function parseDate(tokensValidAfterTime?: number): string | null { + if (!tokensValidAfterTime) { + return null; + } + tokensValidAfterTime = tokensValidAfterTime * 1000; + try { + const date = new Date(tokensValidAfterTime); + if (!isNaN(date.getTime())) { + return date.toUTCString(); + } + } catch { + // ignore error + } + return null; +} + +/** + * Helper function to parse the decoded enrolled factors into a valid MultiFactorSettings + * @internal + */ +export function parseMultiFactor( + multiFactor?: DecodedPayloadUserRecordEnrolledFactors +): AuthMultiFactorSettings { + if (!multiFactor) { + return null; + } + const parsedEnrolledFactors: AuthMultiFactorInfo[] = []; + for (const factor of multiFactor.enrolled_factors || []) { + if (!factor.uid) { + throw new HttpsError( + "internal", + "INTERNAL ASSERT FAILED: Invalid multi-factor info response" + ); + } + const enrollmentTime = factor.enrollment_time + ? new Date(factor.enrollment_time).toUTCString() + : null; + parsedEnrolledFactors.push({ + uid: factor.uid, + factorId: factor.phone_number ? factor.factor_id || "phone" : factor.factor_id, + displayName: factor.display_name, + enrollmentTime, + phoneNumber: factor.phone_number, + }); + } + + if (parsedEnrolledFactors.length > 0) { + return { + enrolledFactors: parsedEnrolledFactors, + }; + } + return null; +} + +/** + * Parses the decoded user record into a valid UserRecord for use in the handler + * @internal + */ +export function parseAuthUserRecord( + decodedJWTUserRecord: DecodedPayloadUserRecord +): AuthUserRecord { + if (!decodedJWTUserRecord.uid) { + throw new HttpsError("internal", "INTERNAL ASSERT FAILED: Invalid user response"); + } + + const disabled = decodedJWTUserRecord.disabled || false; + const metadata = parseMetadata(decodedJWTUserRecord.metadata); + const providerData = parseProviderData(decodedJWTUserRecord.provider_data); + const tokensValidAfterTime = parseDate(decodedJWTUserRecord.tokens_valid_after_time); + const multiFactor = parseMultiFactor(decodedJWTUserRecord.multi_factor); + + return { + uid: decodedJWTUserRecord.uid, + email: decodedJWTUserRecord.email, + emailVerified: decodedJWTUserRecord.email_verified, + displayName: decodedJWTUserRecord.display_name, + photoURL: decodedJWTUserRecord.photo_url, + phoneNumber: decodedJWTUserRecord.phone_number, + disabled, + metadata, + providerData, + passwordHash: decodedJWTUserRecord.password_hash, + passwordSalt: decodedJWTUserRecord.password_salt, + customClaims: decodedJWTUserRecord.custom_claims, + tenantId: decodedJWTUserRecord.tenant_id, + tokensValidAfterTime, + multiFactor, + }; +} + +/** Helper to get the `AdditionalUserInfo` from the decoded JWT */ +function parseAdditionalUserInfo(decodedJWT: DecodedPayload): AdditionalUserInfo { + let profile; + let username; + if (decodedJWT.raw_user_info) { + try { + profile = JSON.parse(decodedJWT.raw_user_info); + } catch (err) { + logger.debug(`Parse Error: ${err.message}`); + } + } + if (profile) { + if (decodedJWT.sign_in_method === "github.com") { + username = profile.login; + } + if (decodedJWT.sign_in_method === "twitter.com") { + username = profile.screen_name; + } + } + + return { + providerId: decodedJWT.sign_in_method === "emailLink" ? "password" : decodedJWT.sign_in_method, + profile, + username, + isNewUser: decodedJWT.event_type === "beforeCreate" ? 
true : false, + recaptchaScore: decodedJWT.recaptcha_score, + email: decodedJWT.email, + phoneNumber: decodedJWT.phone_number, + }; +} + +/** + * Helper to generate a response from the blocking function to the Firebase Auth backend. + * @internal + */ +export function generateResponsePayload( + authResponse?: BeforeCreateResponse | BeforeSignInResponse +): ResponsePayload { + if (!authResponse) { + return {}; + } + + const { recaptchaActionOverride, ...formattedAuthResponse } = authResponse; + const result = {} as ResponsePayload; + const updateMask = getUpdateMask(formattedAuthResponse); + + if (updateMask.length !== 0) { + result.userRecord = { + ...formattedAuthResponse, + updateMask, + }; + } + + if (recaptchaActionOverride !== undefined) { + result.recaptchaActionOverride = recaptchaActionOverride; + } + + return result; +} + +/** Helper to get the Credential from the decoded JWT */ +function parseAuthCredential(decodedJWT: DecodedPayload, time: number): Credential { + if ( + !decodedJWT.sign_in_attributes && + !decodedJWT.oauth_id_token && + !decodedJWT.oauth_access_token && + !decodedJWT.oauth_refresh_token + ) { + return null; + } + return { + claims: decodedJWT.sign_in_attributes, + idToken: decodedJWT.oauth_id_token, + accessToken: decodedJWT.oauth_access_token, + refreshToken: decodedJWT.oauth_refresh_token, + expirationTime: decodedJWT.oauth_expires_in + ? new Date(time + decodedJWT.oauth_expires_in * 1000).toUTCString() + : undefined, + secret: decodedJWT.oauth_token_secret, + providerId: decodedJWT.sign_in_method === "emailLink" ? "password" : decodedJWT.sign_in_method, + signInMethod: decodedJWT.sign_in_method, + }; +} + +/** + * Parses the decoded jwt into a valid AuthEventContext for use in the handler + * @internal + */ +export function parseAuthEventContext( + decodedJWT: DecodedPayload, + projectId: string, + time: number = new Date().getTime() +): AuthEventContext { + const eventType = + (EVENT_MAPPING[decodedJWT.event_type] || decodedJWT.event_type) + + (decodedJWT.sign_in_method ? `:${decodedJWT.sign_in_method}` : ""); + + return { + locale: decodedJWT.locale, + ipAddress: decodedJWT.ip_address, + userAgent: decodedJWT.user_agent, + eventId: decodedJWT.event_id, + eventType, + authType: decodedJWT.user_record ? "USER" : "UNAUTHENTICATED", + resource: { + // TODO(colerogers): figure out the correct service + service: "identitytoolkit.googleapis.com", + name: decodedJWT.tenant_id + ? 
`projects/${projectId}/tenants/${decodedJWT.tenant_id}` + : `projects/${projectId}`, + }, + timestamp: new Date(decodedJWT.iat * 1000).toUTCString(), + additionalUserInfo: parseAdditionalUserInfo(decodedJWT), + credential: parseAuthCredential(decodedJWT, time), + emailType: decodedJWT.email_type as EmailType, + smsType: decodedJWT.sms_type as SmsType, + params: {}, + }; +} + +/** + * Checks the handler response for invalid customClaims & sessionClaims objects + * @internal + */ +export function validateAuthResponse( + eventType: string, + authRequest?: BeforeCreateResponse | BeforeSignInResponse +) { + if (!authRequest) { + authRequest = {}; + } + if (authRequest.customClaims) { + const invalidClaims = DISALLOWED_CUSTOM_CLAIMS.filter((claim) => + authRequest.customClaims.hasOwnProperty(claim) + ); + if (invalidClaims.length > 0) { + throw new HttpsError( + "invalid-argument", + `The customClaims claims "${invalidClaims.join(",")}" are reserved and cannot be specified.` + ); + } + if (JSON.stringify(authRequest.customClaims).length > CLAIMS_MAX_PAYLOAD_SIZE) { + throw new HttpsError( + "invalid-argument", + `The customClaims payload should not exceed ${CLAIMS_MAX_PAYLOAD_SIZE} characters.` + ); + } + } + if (eventType === "beforeSignIn" && (authRequest as BeforeSignInResponse).sessionClaims) { + const invalidClaims = DISALLOWED_CUSTOM_CLAIMS.filter((claim) => + (authRequest as BeforeSignInResponse).sessionClaims.hasOwnProperty(claim) + ); + if (invalidClaims.length > 0) { + throw new HttpsError( + "invalid-argument", + `The sessionClaims claims "${invalidClaims.join( + "," + )}" are reserved and cannot be specified.` + ); + } + if ( + JSON.stringify((authRequest as BeforeSignInResponse).sessionClaims).length > + CLAIMS_MAX_PAYLOAD_SIZE + ) { + throw new HttpsError( + "invalid-argument", + `The sessionClaims payload should not exceed ${CLAIMS_MAX_PAYLOAD_SIZE} characters.` + ); + } + const combinedClaims = { + ...authRequest.customClaims, + ...(authRequest as BeforeSignInResponse).sessionClaims, + }; + if (JSON.stringify(combinedClaims).length > CLAIMS_MAX_PAYLOAD_SIZE) { + throw new HttpsError( + "invalid-argument", + `The customClaims and sessionClaims payloads should not exceed ${CLAIMS_MAX_PAYLOAD_SIZE} characters combined.` + ); + } + } +} + +/** + * Helper function to generate the update mask for the identity platform changed values + * @internal + */ +export function getUpdateMask(authResponse?: BeforeCreateResponse | BeforeSignInResponse): string { + if (!authResponse) { + return ""; + } + const updateMask: string[] = []; + for (const key in authResponse) { + if (authResponse.hasOwnProperty(key) && typeof authResponse[key] !== "undefined") { + updateMask.push(key); + } + } + return updateMask.join(","); +} + +/** @internal */ +export function wrapHandler(eventType: AuthBlockingEventType, handler: AuthBlockingEventHandler) { + return async (req: express.Request, res: express.Response): Promise => { + try { + const projectId = process.env.GCLOUD_PROJECT; + if (!isValidRequest(req)) { + logger.error("Invalid request, unable to process"); + throw new HttpsError("invalid-argument", "Bad Request"); + } + + if (!auth.getAuth(getApp())._verifyAuthBlockingToken) { + throw new Error( + "Cannot validate Auth Blocking token. Please update Firebase Admin SDK to >= v10.1.0" + ); + } + + const decodedPayload: DecodedPayload = isDebugFeatureEnabled("skipTokenVerification") + ? unsafeDecodeAuthBlockingToken(req.body.data.jwt) + : handler.platform === "gcfv1" + ? 
await auth.getAuth(getApp())._verifyAuthBlockingToken(req.body.data.jwt) + : await auth.getAuth(getApp())._verifyAuthBlockingToken(req.body.data.jwt, "run.app"); + let authUserRecord: AuthUserRecord | undefined; + if ( + decodedPayload.event_type === "beforeCreate" || + decodedPayload.event_type === "beforeSignIn" + ) { + authUserRecord = parseAuthUserRecord(decodedPayload.user_record); + } + const authEventContext = parseAuthEventContext(decodedPayload, projectId); + + let authResponse; + if (handler.platform === "gcfv1") { + authResponse = authUserRecord + ? (await (handler as HandlerV1)(authUserRecord, authEventContext)) || undefined + : (await (handler as HandlerV1)(authEventContext)) || undefined; + } else { + authResponse = + (await (handler as HandlerV2)({ + ...authEventContext, + data: authUserRecord, + } as AuthBlockingEvent)) || undefined; + } + + validateAuthResponse(eventType, authResponse); + const result = generateResponsePayload(authResponse); + + res.status(200); + res.setHeader("Content-Type", "application/json"); + res.send(JSON.stringify(result)); + } catch (err) { + let httpErr: HttpsError = err; + if (!(httpErr instanceof HttpsError)) { + // This doesn't count as an 'explicit' error. + logger.error("Unhandled error", err); + httpErr = new HttpsError("internal", "An unexpected error occurred."); + } + + const { status } = httpErr.httpErrorCode; + const body = { error: httpErr.toJSON() }; + res.setHeader("Content-Type", "application/json"); + res.status(status).send(body); + } + }; +} diff --git a/src/common/providers/tasks.ts b/src/common/providers/tasks.ts new file mode 100644 index 000000000..f2e0f9ec7 --- /dev/null +++ b/src/common/providers/tasks.ts @@ -0,0 +1,241 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as express from "express"; +import { DecodedIdToken } from "firebase-admin/auth"; + +import * as logger from "../../logger"; +import * as https from "./https"; +import { Expression } from "../../params"; +import { ResetValue } from "../options"; + +/** How a task should be retried in the event of a non-2xx return. */ +export interface RetryConfig { + /** + * Maximum number of times a request should be attempted. + * If left unspecified, will default to 3. + */ + maxAttempts?: number | Expression | ResetValue; + + /** + * Maximum amount of time for retrying failed task. + * If left unspecified will retry indefinitely. 
+   */
+  maxRetrySeconds?: number | Expression<number> | ResetValue;
+
+  /**
+   * The maximum amount of time to wait between attempts.
+   * If left unspecified will default to 1hr.
+   */
+  maxBackoffSeconds?: number | Expression<number> | ResetValue;
+
+  /**
+   * The maximum number of times to double the backoff between
+   * retries. If left unspecified will default to 16.
+   */
+  maxDoublings?: number | Expression<number> | ResetValue;
+
+  /**
+   * The minimum time to wait between attempts. If left unspecified
+   * will default to 100ms.
+   */
+  minBackoffSeconds?: number | Expression<number> | ResetValue;
+}
+
+/** How congestion control should be applied to the function. */
+export interface RateLimits {
+  /**
+   * The maximum number of requests that can be processed at a time.
+   * If left unspecified, will default to 1000.
+   */
+  maxConcurrentDispatches?: number | Expression<number> | ResetValue;
+
+  /**
+   * The maximum number of requests that can be invoked per second.
+   * If left unspecified, will default to 500.
+   */
+  maxDispatchesPerSecond?: number | Expression<number> | ResetValue;
+}
+
+/** Metadata about the authorization used to invoke a function. */
+export interface AuthData {
+  uid: string;
+  token: DecodedIdToken;
+  rawToken: string;
+}
+
+/** Metadata about a call to a Task Queue function. */
+export interface TaskContext {
+  /**
+   * The result of decoding and verifying an OIDC token.
+   */
+  auth?: AuthData;
+
+  /**
+   * The name of the queue.
+   * Populated via the `X-CloudTasks-QueueName` header.
+   */
+  queueName: string;
+
+  /**
+   * The "short" name of the task, or, if no name was specified at creation, a unique
+   * system-generated id.
+   * This is the "my-task-id" value in the complete task name, such as "task_name =
+   * projects/my-project-id/locations/my-location/queues/my-queue-id/tasks/my-task-id."
+   * Populated via the `X-CloudTasks-TaskName` header.
+   */
+  id: string;
+
+  /**
+   * The number of times this task has been retried.
+   * For the first attempt, this value is 0. This number includes attempts where the task failed
+   * due to 5XX error codes and never reached the execution phase.
+   * Populated via the `X-CloudTasks-TaskRetryCount` header.
+   */
+  retryCount: number;
+
+  /**
+   * The total number of times that the task has received a response from the handler.
+   * Since Cloud Tasks deletes the task once a successful response has been received, all
+   * previous handler responses were failures. This number does not include failures due to 5XX
+   * error codes.
+   * Populated via the `X-CloudTasks-TaskExecutionCount` header.
+   */
+  executionCount: number;
+
+  /**
+   * The schedule time of the task, as an RFC 3339 string in UTC time zone.
+   * Populated via the `X-CloudTasks-TaskETA` header, which uses seconds since January 1 1970.
+   */
+  scheduledTime: string;
+
+  /**
+   * The HTTP response code from the previous retry.
+   * Populated via the `X-CloudTasks-TaskPreviousResponse` header.
+   */
+  previousResponse?: number;
+
+  /**
+   * The reason for retrying the task.
+   * Populated via the `X-CloudTasks-TaskRetryReason` header.
+   */
+  retryReason?: string;
+
+  /**
+   * Raw request headers.
+   */
+  headers?: Record<string, string>;
+}
+
+/**
+ * The request used to call a task queue function.
+ */
+export type Request<T = any> = TaskContext & {
+  /**
+   * The parameters used by a client when calling this function.
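+   * (For example, the JSON payload that was passed when the task was enqueued.)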
+ */ + data: T; +}; + +type v1TaskHandler = (data: any, context: TaskContext) => void | Promise; +type v2TaskHandler = (request: Request) => void | Promise; + +/** @internal */ +export function onDispatchHandler( + handler: v1TaskHandler | v2TaskHandler +): (req: https.Request, res: express.Response) => Promise { + return async (req: https.Request, res: express.Response): Promise => { + try { + if (!https.isValidRequest(req)) { + logger.error("Invalid request, unable to process."); + throw new https.HttpsError("invalid-argument", "Bad Request"); + } + + const headers: Record = {}; + for (const [key, value] of Object.entries(req.headers)) { + if (!Array.isArray(value)) { + headers[key] = value; + } + } + + const context: TaskContext = { + queueName: req.header("X-CloudTasks-QueueName"), + id: req.header("X-CloudTasks-TaskName"), + retryCount: req.header("X-CloudTasks-TaskRetryCount") + ? Number(req.header("X-CloudTasks-TaskRetryCount")) + : undefined, + executionCount: req.header("X-CloudTasks-TaskExecutionCount") + ? Number(req.header("X-CloudTasks-TaskExecutionCount")) + : undefined, + scheduledTime: req.header("X-CloudTasks-TaskETA"), + previousResponse: req.header("X-CloudTasks-TaskPreviousResponse") + ? Number(req.header("X-CloudTasks-TaskPreviousResponse")) + : undefined, + retryReason: req.header("X-CloudTasks-TaskRetryReason"), + headers, + }; + + if (!process.env.FUNCTIONS_EMULATOR) { + const authHeader = req.header("Authorization") || ""; + const token = authHeader.match(/^Bearer (.*)$/)?.[1]; + // Note: this should never happen since task queue functions are guarded by IAM. + if (!token) { + throw new https.HttpsError("unauthenticated", "Unauthenticated"); + } + // We skip authenticating the token since tq functions are guarded by IAM. + const authToken = https.unsafeDecodeIdToken(token); + context.auth = { + uid: authToken.uid, + token: authToken, + rawToken: token, + }; + } + + const data: Req = https.decode(req.body.data); + if (handler.length === 2) { + await handler(data, context); + } else { + const arg: Request = { + ...context, + data, + }; + // For some reason the type system isn't picking up that the handler + // is a one argument function. + await (handler as v2TaskHandler)(arg); + } + + res.status(204).end(); + } catch (err) { + let httpErr: https.HttpsError = err; + if (!(err instanceof https.HttpsError)) { + // This doesn't count as an 'explicit' error. 
+ logger.error("Unhandled error", err); + httpErr = new https.HttpsError("internal", "INTERNAL"); + } + + const { status } = httpErr.httpErrorCode; + const body = { error: httpErr.toJSON() }; + + res.status(status).send(body); + } + }; +} diff --git a/src/common/timezone.ts b/src/common/timezone.ts new file mode 100644 index 000000000..121eec5b5 --- /dev/null +++ b/src/common/timezone.ts @@ -0,0 +1,542 @@ +export const tzDatabase: Record = { + "Africa/Abidjan": "+00:00", + "Africa/Accra": "+00:00", + "Africa/Addis_Ababa": "+03:00", + "Africa/Algiers": "+01:00", + "Africa/Asmara": "+03:00", + "Africa/Asmera": "+03:00", + "Africa/Bamako": "+00:00", + "Africa/Bangui": "+01:00", + "Africa/Banjul": "+00:00", + "Africa/Blantyre": "+02:00", + "Africa/Brazzaville": "+01:00", + "Africa/Bujumbura": "+02:00", + "Africa/Cairo": "+02:00", + "Africa/Casablanca": "+00:00", + "Africa/Ceuta": "+01:00", + "Africa/Conakry": "+00:00", + "Africa/Dakar": "+00:00", + "Africa/Dar_es_Salaam": "+03:00", + "Africa/Djibouti": "+03:00", + "Africa/Douala": "+01:00", + "Africa/El_Aaiun": "+00:00", + "Africa/Freetown": "+00:00", + "Africa/Gaborone": "+02:00", + "Africa/Harare": "+02:00", + "Africa/Johannesburg": "+02:00", + "Africa/Juba": "+03:00", + "Africa/Kampala": "+03:00", + "Africa/Khartoum": "+03:00", + "Africa/Kigali": "+02:00", + "Africa/Kinshasa": "+01:00", + "Africa/Lagos": "+01:00", + "Africa/Libreville": "+01:00", + "Africa/Lome": "+00:00", + "Africa/Luanda": "+01:00", + "Africa/Lubumbashi": "+02:00", + "Africa/Lusaka": "+02:00", + "Africa/Malabo": "+01:00", + "Africa/Maputo": "+02:00", + "Africa/Maseru": "+02:00", + "Africa/Mbabane": "+02:00", + "Africa/Mogadishu": "+03:00", + "Africa/Monrovia": "+00:00", + "Africa/Nairobi": "+03:00", + "Africa/Ndjamena": "+01:00", + "Africa/Niamey": "+01:00", + "Africa/Nouakchott": "+00:00", + "Africa/Ouagadougou": "+00:00", + "Africa/Porto-Novo": "+01:00", + "Africa/Sao_Tome": "+00:00", + "Africa/Timbuktu": "+00:00", + "Africa/Tripoli": "+02:00", + "Africa/Tunis": "+01:00", + "Africa/Windhoek": "+01:00", + "America/Adak": "-10:00", + "America/Anchorage": "-09:00", + "America/Anguilla": "-04:00", + "America/Antigua": "-04:00", + "America/Araguaina": "-03:00", + "America/Argentina/Buenos_Aires": "-03:00", + "America/Argentina/Catamarca": "-03:00", + "America/Argentina/ComodRivadavia": "-03:00", + "America/Argentina/Cordoba": "-03:00", + "America/Argentina/Jujuy": "-03:00", + "America/Argentina/La_Rioja": "-03:00", + "America/Argentina/Mendoza": "-03:00", + "America/Argentina/Rio_Gallegos": "-03:00", + "America/Argentina/Salta": "-03:00", + "America/Argentina/San_Juan": "-03:00", + "America/Argentina/San_Luis": "-03:00", + "America/Argentina/Tucuman": "-03:00", + "America/Argentina/Ushuaia": "-03:00", + "America/Aruba": "-04:00", + "America/Asuncion": "-04:00", + "America/Atikokan": "-05:00", + "America/Atka": "-10:00", + "America/Bahia": "-03:00", + "America/Bahia_Banderas": "-06:00", + "America/Barbados": "-04:00", + "America/Belem": "-03:00", + "America/Belize": "-06:00", + "America/Blanc-Sablon": "-04:00", + "America/Boa_Vista": "-04:00", + "America/Bogota": "-05:00", + "America/Boise": "-07:00", + "America/Buenos_Aires": "-03:00", + "America/Cambridge_Bay": "-07:00", + "America/Campo_Grande": "-04:00", + "America/Cancun": "-06:00", + "America/Caracas": "-04:30", + "America/Catamarca": "-03:00", + "America/Cayenne": "-03:00", + "America/Cayman": "-05:00", + "America/Chicago": "-06:00", + "America/Chihuahua": "-07:00", + "America/Coral_Harbour": "-05:00", + 
"America/Cordoba": "-03:00", + "America/Costa_Rica": "-06:00", + "America/Creston": "-07:00", + "America/Cuiaba": "-04:00", + "America/Curacao": "-04:00", + "America/Danmarkshavn": "+00:00", + "America/Dawson": "-08:00", + "America/Dawson_Creek": "-07:00", + "America/Denver": "-07:00", + "America/Detroit": "-05:00", + "America/Dominica": "-04:00", + "America/Edmonton": "-07:00", + "America/Eirunepe": "-05:00", + "America/El_Salvador": "-06:00", + "America/Ensenada": "-08:00", + "America/Fort_Wayne": "-05:00", + "America/Fortaleza": "-03:00", + "America/Glace_Bay": "-04:00", + "America/Godthab": "-03:00", + "America/Goose_Bay": "-04:00", + "America/Grand_Turk": "-05:00", + "America/Grenada": "-04:00", + "America/Guadeloupe": "-04:00", + "America/Guatemala": "-06:00", + "America/Guayaquil": "-05:00", + "America/Guyana": "-04:00", + "America/Halifax": "-04:00", + "America/Havana": "-05:00", + "America/Hermosillo": "-07:00", + "America/Indiana/Indianapolis": "-05:00", + "America/Indiana/Knox": "-06:00", + "America/Indiana/Marengo": "-05:00", + "America/Indiana/Petersburg": "-05:00", + "America/Indiana/Tell_City": "-06:00", + "America/Indiana/Valparaiso": "-06:00", + "America/Indiana/Vevay": "-05:00", + "America/Indiana/Vincennes": "-05:00", + "America/Indiana/Winamac": "-05:00", + "America/Indianapolis": "-05:00", + "America/Inuvik": "-07:00", + "America/Iqaluit": "-05:00", + "America/Jamaica": "-05:00", + "America/Jujuy": "-03:00", + "America/Juneau": "-09:00", + "America/Kentucky/Louisville": "-05:00", + "America/Kentucky/Monticello": "-05:00", + "America/Knox_IN": "-06:00", + "America/Kralendijk": "-04:00", + "America/La_Paz": "-04:00", + "America/Lima": "-05:00", + "America/Los_Angeles": "-08:00", + "America/Louisville": "-05:00", + "America/Lower_Princes": "-04:00", + "America/Maceio": "-03:00", + "America/Managua": "-06:00", + "America/Manaus": "-04:00", + "America/Marigot": "-04:00", + "America/Martinique": "-04:00", + "America/Matamoros": "-06:00", + "America/Mazatlan": "-07:00", + "America/Mendoza": "-03:00", + "America/Menominee": "-06:00", + "America/Merida": "-06:00", + "America/Metlakatla": "-08:00", + "America/Mexico_City": "-06:00", + "America/Miquelon": "-03:00", + "America/Moncton": "-04:00", + "America/Monterrey": "-06:00", + "America/Montevideo": "-03:00", + "America/Montreal": "-05:00", + "America/Montserrat": "-04:00", + "America/Nassau": "-05:00", + "America/New_York": "-05:00", + "America/Nipigon": "-05:00", + "America/Nome": "-09:00", + "America/Noronha": "-02:00", + "America/North_Dakota/Beulah": "-06:00", + "America/North_Dakota/Center": "-06:00", + "America/North_Dakota/New_Salem": "-06:00", + "America/Ojinaga": "-07:00", + "America/Panama": "-05:00", + "America/Pangnirtung": "-05:00", + "America/Paramaribo": "-03:00", + "America/Phoenix": "-07:00", + "America/Port_of_Spain": "-04:00", + "America/Port-au-Prince": "-05:00", + "America/Porto_Acre": "-05:00", + "America/Porto_Velho": "-04:00", + "America/Puerto_Rico": "-04:00", + "America/Rainy_River": "-06:00", + "America/Rankin_Inlet": "-06:00", + "America/Recife": "-03:00", + "America/Regina": "-06:00", + "America/Resolute": "-06:00", + "America/Rio_Branco": "-05:00", + "America/Rosario": "-03:00", + "America/Santa_Isabel": "-08:00", + "America/Santarem": "-03:00", + "America/Santiago": "-03:00", + "America/Santo_Domingo": "-04:00", + "America/Sao_Paulo": "-03:00", + "America/Scoresbysund": "-01:00", + "America/Shiprock": "-07:00", + "America/Sitka": "-09:00", + "America/St_Barthelemy": "-04:00", + 
"America/St_Johns": "-03:30", + "America/St_Kitts": "-04:00", + "America/St_Lucia": "-04:00", + "America/St_Thomas": "-04:00", + "America/St_Vincent": "-04:00", + "America/Swift_Current": "-06:00", + "America/Tegucigalpa": "-06:00", + "America/Thule": "-04:00", + "America/Thunder_Bay": "-05:00", + "America/Tijuana": "-08:00", + "America/Toronto": "-05:00", + "America/Tortola": "-04:00", + "America/Vancouver": "-08:00", + "America/Virgin": "-04:00", + "America/Whitehorse": "-08:00", + "America/Winnipeg": "-06:00", + "America/Yakutat": "-09:00", + "America/Yellowknife": "-07:00", + "Antarctica/Casey": "+11:00", + "Antarctica/Davis": "+05:00", + "Antarctica/DumontDUrville": "+10:00", + "Antarctica/Macquarie": "+11:00", + "Antarctica/Mawson": "+05:00", + "Antarctica/McMurdo": "+12:00", + "Antarctica/Palmer": "-04:00", + "Antarctica/Rothera": "-03:00", + "Antarctica/South_Pole": "+12:00", + "Antarctica/Syowa": "+03:00", + "Antarctica/Troll": "+00:00", + "Antarctica/Vostok": "+06:00", + "Arctic/Longyearbyen": "+01:00", + "Asia/Aden": "+03:00", + "Asia/Almaty": "+06:00", + "Asia/Amman": "+02:00", + "Asia/Anadyr": "+12:00", + "Asia/Aqtau": "+05:00", + "Asia/Aqtobe": "+05:00", + "Asia/Ashgabat": "+05:00", + "Asia/Ashkhabad": "+05:00", + "Asia/Baghdad": "+03:00", + "Asia/Bahrain": "+03:00", + "Asia/Baku": "+04:00", + "Asia/Bangkok": "+07:00", + "Asia/Beirut": "+02:00", + "Asia/Bishkek": "+06:00", + "Asia/Brunei": "+08:00", + "Asia/Calcutta": "+05:30", + "Asia/Choibalsan": "+08:00", + "Asia/Chongqing": "+08:00", + "Asia/Chungking": "+08:00", + "Asia/Colombo": "+05:30", + "Asia/Dacca": "+06:00", + "Asia/Damascus": "+02:00", + "Asia/Dhaka": "+06:00", + "Asia/Dili": "+09:00", + "Asia/Dubai": "+04:00", + "Asia/Dushanbe": "+05:00", + "Asia/Gaza": "+02:00", + "Asia/Harbin": "+08:00", + "Asia/Hebron": "+02:00", + "Asia/Ho_Chi_Minh": "+07:00", + "Asia/Hong_Kong": "+08:00", + "Asia/Hovd": "+07:00", + "Asia/Irkutsk": "+08:00", + "Asia/Istanbul": "+02:00", + "Asia/Jakarta": "+07:00", + "Asia/Jayapura": "+09:00", + "Asia/Jerusalem": "+02:00", + "Asia/Kabul": "+04:30", + "Asia/Kamchatka": "+12:00", + "Asia/Karachi": "+05:00", + "Asia/Kashgar": "+08:00", + "Asia/Kathmandu": "+05:45", + "Asia/Katmandu": "+05:45", + "Asia/Khandyga": "+09:00", + "Asia/Kolkata": "+05:30", + "Asia/Krasnoyarsk": "+07:00", + "Asia/Kuala_Lumpur": "+08:00", + "Asia/Kuching": "+08:00", + "Asia/Kuwait": "+03:00", + "Asia/Macao": "+08:00", + "Asia/Macau": "+08:00", + "Asia/Magadan": "+10:00", + "Asia/Makassar": "+08:00", + "Asia/Manila": "+08:00", + "Asia/Muscat": "+04:00", + "Asia/Nicosia": "+02:00", + "Asia/Novokuznetsk": "+07:00", + "Asia/Novosibirsk": "+06:00", + "Asia/Omsk": "+06:00", + "Asia/Oral": "+05:00", + "Asia/Phnom_Penh": "+07:00", + "Asia/Pontianak": "+07:00", + "Asia/Pyongyang": "+09:00", + "Asia/Qatar": "+03:00", + "Asia/Qyzylorda": "+06:00", + "Asia/Rangoon": "+06:30", + "Asia/Riyadh": "+03:00", + "Asia/Saigon": "+07:00", + "Asia/Sakhalin": "+11:00", + "Asia/Samarkand": "+05:00", + "Asia/Seoul": "+09:00", + "Asia/Shanghai": "+08:00", + "Asia/Singapore": "+08:00", + "Asia/Taipei": "+08:00", + "Asia/Tashkent": "+05:00", + "Asia/Tbilisi": "+04:00", + "Asia/Tehran": "+03:30", + "Asia/Tel_Aviv": "+02:00", + "Asia/Thimbu": "+06:00", + "Asia/Thimphu": "+06:00", + "Asia/Tokyo": "+09:00", + "Asia/Ujung_Pandang": "+08:00", + "Asia/Ulaanbaatar": "+08:00", + "Asia/Ulan_Bator": "+08:00", + "Asia/Urumqi": "+08:00", + "Asia/Ust-Nera": "+10:00", + "Asia/Vientiane": "+07:00", + "Asia/Vladivostok": "+10:00", + "Asia/Yakutsk": "+09:00", + 
"Asia/Yekaterinburg": "+05:00", + "Asia/Yerevan": "+04:00", + "Atlantic/Azores": "-01:00", + "Atlantic/Bermuda": "-04:00", + "Atlantic/Canary": "+00:00", + "Atlantic/Cape_Verde": "-01:00", + "Atlantic/Faeroe": "+00:00", + "Atlantic/Faroe": "+00:00", + "Atlantic/Jan_Mayen": "+01:00", + "Atlantic/Madeira": "+00:00", + "Atlantic/Reykjavik": "+00:00", + "Atlantic/South_Georgia": "-02:00", + "Atlantic/St_Helena": "+00:00", + "Atlantic/Stanley": "-03:00", + "Australia/ACT": "+10:00", + "Australia/Adelaide": "+09:30", + "Australia/Brisbane": "+10:00", + "Australia/Broken_Hill": "+09:30", + "Australia/Canberra": "+10:00", + "Australia/Currie": "+10:00", + "Australia/Darwin": "+09:30", + "Australia/Eucla": "+08:45", + "Australia/Hobart": "+10:00", + "Australia/LHI": "+10:30", + "Australia/Lindeman": "+10:00", + "Australia/Lord_Howe": "+10:30", + "Australia/Melbourne": "+10:00", + "Australia/North": "+09:30", + "Australia/NSW": "+10:00", + "Australia/Perth": "+08:00", + "Australia/Queensland": "+10:00", + "Australia/South": "+09:30", + "Australia/Sydney": "+10:00", + "Australia/Tasmania": "+10:00", + "Australia/Victoria": "+10:00", + "Australia/West": "+08:00", + "Australia/Yancowinna": "+09:30", + "Brazil/Acre": "-05:00", + "Brazil/DeNoronha": "-02:00", + "Brazil/East": "-03:00", + "Brazil/West": "-04:00", + "Canada/Atlantic": "-04:00", + "Canada/Central": "-06:00", + "Canada/Eastern": "-05:00", + "Canada/East-Saskatchewan": "-06:00", + "Canada/Mountain": "-07:00", + "Canada/Newfoundland": "-03:30", + "Canada/Pacific": "-08:00", + "Canada/Saskatchewan": "-06:00", + "Canada/Yukon": "-08:00", + "Chile/Continental": "-03:00", + "Chile/EasterIsland": "-05:00", + Cuba: "-05:00", + Egypt: "+02:00", + Eire: "+00:00", + "Etc/GMT": "+00:00", + "Etc/GMT+0": "+00:00", + "Etc/UCT": "+00:00", + "Etc/Universal": "+00:00", + "Etc/UTC": "+00:00", + "Etc/Zulu": "+00:00", + "Europe/Amsterdam": "+01:00", + "Europe/Andorra": "+01:00", + "Europe/Athens": "+02:00", + "Europe/Belfast": "+00:00", + "Europe/Belgrade": "+01:00", + "Europe/Berlin": "+01:00", + "Europe/Bratislava": "+01:00", + "Europe/Brussels": "+01:00", + "Europe/Bucharest": "+02:00", + "Europe/Budapest": "+01:00", + "Europe/Busingen": "+01:00", + "Europe/Chisinau": "+02:00", + "Europe/Copenhagen": "+01:00", + "Europe/Dublin": "+00:00", + "Europe/Gibraltar": "+01:00", + "Europe/Guernsey": "+00:00", + "Europe/Helsinki": "+02:00", + "Europe/Isle_of_Man": "+00:00", + "Europe/Istanbul": "+02:00", + "Europe/Jersey": "+00:00", + "Europe/Kaliningrad": "+02:00", + "Europe/Kiev": "+02:00", + "Europe/Lisbon": "+00:00", + "Europe/Ljubljana": "+01:00", + "Europe/London": "+00:00", + "Europe/Luxembourg": "+01:00", + "Europe/Madrid": "+01:00", + "Europe/Malta": "+01:00", + "Europe/Mariehamn": "+02:00", + "Europe/Minsk": "+03:00", + "Europe/Monaco": "+01:00", + "Europe/Moscow": "+03:00", + "Europe/Nicosia": "+02:00", + "Europe/Oslo": "+01:00", + "Europe/Paris": "+01:00", + "Europe/Podgorica": "+01:00", + "Europe/Prague": "+01:00", + "Europe/Riga": "+02:00", + "Europe/Rome": "+01:00", + "Europe/Samara": "+04:00", + "Europe/San_Marino": "+01:00", + "Europe/Sarajevo": "+01:00", + "Europe/Simferopol": "+03:00", + "Europe/Skopje": "+01:00", + "Europe/Sofia": "+02:00", + "Europe/Stockholm": "+01:00", + "Europe/Tallinn": "+02:00", + "Europe/Tirane": "+01:00", + "Europe/Tiraspol": "+02:00", + "Europe/Uzhgorod": "+02:00", + "Europe/Vaduz": "+01:00", + "Europe/Vatican": "+01:00", + "Europe/Vienna": "+01:00", + "Europe/Vilnius": "+02:00", + "Europe/Volgograd": "+03:00", + 
"Europe/Warsaw": "+01:00", + "Europe/Zagreb": "+01:00", + "Europe/Zaporozhye": "+02:00", + "Europe/Zurich": "+01:00", + GB: "+00:00", + "GB-Eire": "+00:00", + GMT: "+00:00", + "GMT+0": "+00:00", + GMT0: "+00:00", + "GMT-0": "+00:00", + Greenwich: "+00:00", + Hongkong: "+08:00", + Iceland: "+00:00", + "Indian/Antananarivo": "+03:00", + "Indian/Chagos": "+06:00", + "Indian/Christmas": "+07:00", + "Indian/Cocos": "+06:30", + "Indian/Comoro": "+03:00", + "Indian/Kerguelen": "+05:00", + "Indian/Mahe": "+04:00", + "Indian/Maldives": "+05:00", + "Indian/Mauritius": "+04:00", + "Indian/Mayotte": "+03:00", + "Indian/Reunion": "+04:00", + Iran: "+03:30", + Israel: "+02:00", + Jamaica: "-05:00", + Japan: "+09:00", + Kwajalein: "+12:00", + Libya: "+02:00", + "Mexico/BajaNorte": "-08:00", + "Mexico/BajaSur": "-07:00", + "Mexico/General": "-06:00", + Navajo: "-07:00", + NZ: "+12:00", + "NZ-CHAT": "+12:45", + "Pacific/Apia": "+13:00", + "Pacific/Auckland": "+12:00", + "Pacific/Chatham": "+12:45", + "Pacific/Chuuk": "+10:00", + "Pacific/Easter": "-06:00", + "Pacific/Efate": "+11:00", + "Pacific/Enderbury": "+13:00", + "Pacific/Fakaofo": "+13:00", + "Pacific/Fiji": "+12:00", + "Pacific/Funafuti": "+12:00", + "Pacific/Galapagos": "-06:00", + "Pacific/Gambier": "-09:00", + "Pacific/Guadalcanal": "+11:00", + "Pacific/Guam": "+10:00", + "Pacific/Honolulu": "-10:00", + "Pacific/Johnston": "-10:00", + "Pacific/Kiritimati": "+14:00", + "Pacific/Kosrae": "+11:00", + "Pacific/Kwajalein": "+12:00", + "Pacific/Majuro": "+12:00", + "Pacific/Marquesas": "-09:30", + "Pacific/Midway": "-11:00", + "Pacific/Nauru": "+12:00", + "Pacific/Niue": "-11:00", + "Pacific/Norfolk": "+11:30", + "Pacific/Noumea": "+11:00", + "Pacific/Pago_Pago": "-11:00", + "Pacific/Palau": "+09:00", + "Pacific/Pitcairn": "-08:00", + "Pacific/Pohnpei": "+11:00", + "Pacific/Ponape": "+11:00", + "Pacific/Port_Moresby": "+10:00", + "Pacific/Rarotonga": "-10:00", + "Pacific/Saipan": "+10:00", + "Pacific/Samoa": "-11:00", + "Pacific/Tahiti": "-10:00", + "Pacific/Tarawa": "+12:00", + "Pacific/Tongatapu": "+13:00", + "Pacific/Truk": "+10:00", + "Pacific/Wake": "+12:00", + "Pacific/Wallis": "+12:00", + "Pacific/Yap": "+10:00", + Poland: "+01:00", + Portugal: "+00:00", + PRC: "+08:00", + ROC: "+08:00", + ROK: "+09:00", + Singapore: "+08:00", + Turkey: "+02:00", + UCT: "+00:00", + Universal: "+00:00", + "US/Alaska": "-09:00", + "US/Aleutian": "-10:00", + "US/Arizona": "-07:00", + "US/Central": "-06:00", + "US/Eastern": "-05:00", + "US/East-Indiana": "-05:00", + "US/Hawaii": "-10:00", + "US/Indiana-Starke": "-06:00", + "US/Michigan": "-05:00", + "US/Mountain": "-07:00", + "US/Pacific": "-08:00", + "US/Samoa": "-11:00", + UTC: "+00:00", + "W-SU": "+03:00", + Zulu: "+00:00", +}; + +export type timezone = keyof typeof tzDatabase; diff --git a/src/common/trace.ts b/src/common/trace.ts new file mode 100644 index 000000000..65d9894cc --- /dev/null +++ b/src/common/trace.ts @@ -0,0 +1,81 @@ +import { AsyncLocalStorage } from "async_hooks"; + +export const traceContext = new AsyncLocalStorage(); + +export interface TraceContext { + version: string; + traceId: string; + parentId: string; + sample: boolean; +} + +/** + * A regex to match the Cloud Trace header. + * - ([A-Fa-f0-9]{32}): The trace id, a 32 character hex value. (e.g. 4bf92f3577b34da6a3ce929d0e0e4736) + * - ([0-9]+): The parent span id, a 64 bit integer. (e.g. 00f067aa0ba902b7) + * - (?:;o=([0-3])): The trace mask, 1-3 denote it should be traced. 
+ */
+const CLOUD_TRACE_REGEX = new RegExp(
+  "^(?<traceId>[A-Fa-f0-9]{32})/" + "(?<parentIdInt>[0-9]+)" + "(?:;o=(?<traceMask>[0-3]))?$"
+);
+const CLOUD_TRACE_HEADER = "X-Cloud-Trace-Context";
+
+function matchCloudTraceHeader(carrier: unknown): TraceContext | undefined {
+  let header: unknown = carrier?.[CLOUD_TRACE_HEADER];
+  if (!header) {
+    // try lowercase header
+    header = carrier?.[CLOUD_TRACE_HEADER.toLowerCase()];
+  }
+  if (header && typeof header === "string") {
+    const matches = CLOUD_TRACE_REGEX.exec(header);
+    if (matches && matches.groups) {
+      const { traceId, parentIdInt, traceMask } = matches.groups;
+      // Convert parentId from unsigned int to hex
+      const parentId = parseInt(parentIdInt);
+      if (isNaN(parentId)) {
+        // Ignore traces with invalid parentIds
+        return;
+      }
+      const sample = !!traceMask && traceMask !== "0";
+      return { traceId, parentId: parentId.toString(16), sample, version: "00" };
+    }
+  }
+}
+
+/**
+ * A regex to match the traceparent header.
+ * - ^([a-f0-9]{2}): The specification version (e.g. 00)
+ * - ([a-f0-9]{32}): The trace id, a 16-byte array. (e.g. 4bf92f3577b34da6a3ce929d0e0e4736)
+ * - ([a-f0-9]{16}): The parent span id, an 8-byte array. (e.g. 00f067aa0ba902b7)
+ * - ([a-f0-9]{2}): The sampled flag. (e.g. 00)
+ */
+const TRACEPARENT_REGEX = new RegExp(
+  "^(?<version>[a-f0-9]{2})-" +
+    "(?<traceId>[a-f0-9]{32})-" +
+    "(?<parentId>[a-f0-9]{16})-" +
+    "(?<flag>[a-f0-9]{2})$"
+);
+const TRACEPARENT_HEADER = "traceparent";
+
+function matchTraceparentHeader(carrier: unknown): TraceContext | undefined {
+  const header: unknown = carrier?.[TRACEPARENT_HEADER];
+  if (header && typeof header === "string") {
+    const matches = TRACEPARENT_REGEX.exec(header);
+    if (matches && matches.groups) {
+      const { version, traceId, parentId, flag } = matches.groups;
+      const sample = flag === "01";
+      return { traceId, parentId, sample, version };
+    }
+  }
+}
+
+/**
+ * Extracts trace context from given carrier object, if any.
+ *
+ * Supports Cloud Trace and traceparent format.
+ *
+ * @param carrier
+ */
+export function extractTraceContext(carrier: unknown): TraceContext | undefined {
+  return matchCloudTraceHeader(carrier) || matchTraceparentHeader(carrier);
+}
diff --git a/src/common/utilities/assertions.ts b/src/common/utilities/assertions.ts
new file mode 100644
index 000000000..f63b08eca
--- /dev/null
+++ b/src/common/utilities/assertions.ts
@@ -0,0 +1,14 @@
+/** @hidden
+ * @file Provides common assertion helpers which can be used to improve
+ * strictness of both type checking and runtime.
+ */
+
+/**
+ * Checks that the given value is of type `never` — the type that’s left after
+ * all other cases have been removed.
+ *
+ * @param x A value of type `never`.
+ */
+export function assertNever(x: never): never {
+  throw new Error(`Unhandled discriminated union member: ${JSON.stringify(x)}.`);
+}
diff --git a/src/encoder.ts b/src/common/utilities/encoder.ts
similarity index 91%
rename from src/encoder.ts
rename to src/common/utilities/encoder.ts
index dfa010a0b..ecdc86550 100644
--- a/src/encoder.ts
+++ b/src/common/utilities/encoder.ts
@@ -21,11 +21,11 @@
 // SOFTWARE.
export function dateToTimestampProto(timeString?: string) { - if (typeof timeString === 'undefined') { + if (typeof timeString === "undefined") { return; } - let date = new Date(timeString); - let seconds = Math.floor(date.getTime() / 1000); + const date = new Date(timeString); + const seconds = Math.floor(date.getTime() / 1000); let nanos = 0; if (timeString.length > 20) { const nanoString = timeString.substring(20, timeString.length - 1); @@ -33,4 +33,4 @@ export function dateToTimestampProto(timeString?: string) { nanos = parseInt(nanoString, 10) * Math.pow(10, trailingZeroes); } return { seconds, nanos }; -}; +} diff --git a/src/common/utilities/path-pattern.ts b/src/common/utilities/path-pattern.ts new file mode 100644 index 000000000..8548a850f --- /dev/null +++ b/src/common/utilities/path-pattern.ts @@ -0,0 +1,172 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
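// A quick sketch (not part of this change) of how the dateToTimestampProto helper from
// encoder.ts above behaves, assuming it is imported from a sibling module. The sample
// timestamp is arbitrary: it is 24 characters long, so the fractional part "123" is read
// starting at index 20 and scaled up to nanoseconds.
import { dateToTimestampProto } from "./encoder";

const ts = dateToTimestampProto("2023-03-14T10:00:00.123Z");
// ts => { seconds: 1678788000, nanos: 123000000 }
// A timestamp with no fractional part (e.g. "2023-03-14T10:00:00Z", exactly 20 characters)
// keeps nanos at 0, and an undefined input returns undefined.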
+ +import { pathParts } from "./path"; + +/** https://cloud.google.com/eventarc/docs/path-patterns */ + +/** @hidden */ +const WILDCARD_CAPTURE_REGEX = new RegExp("{[^/{}]+}", "g"); + +/** @internal */ +export function trimParam(param: string) { + const paramNoBraces = param.slice(1, -1); + if (paramNoBraces.includes("=")) { + return paramNoBraces.slice(0, paramNoBraces.indexOf("=")); + } + return paramNoBraces; +} + +/** @hidden */ +type SegmentName = "segment" | "single-capture" | "multi-capture"; + +/** @hidden */ +interface PathSegment { + readonly name: SegmentName; + readonly value: string; + readonly trimmed: string; + isSingleSegmentWildcard(): boolean; + isMultiSegmentWildcard(): boolean; +} + +/** @hidden */ +class Segment implements PathSegment { + readonly name = "segment"; + readonly trimmed: string; + constructor(readonly value: string) { + this.trimmed = value; + } + isSingleSegmentWildcard(): boolean { + return this.value.includes("*") && !this.isMultiSegmentWildcard(); + } + isMultiSegmentWildcard(): boolean { + return this.value.includes("**"); + } +} + +/** @hidden */ +class SingleCaptureSegment implements PathSegment { + readonly name = "single-capture"; + readonly trimmed: string; + constructor(readonly value: string) { + this.trimmed = trimParam(value); + } + isSingleSegmentWildcard(): boolean { + return true; + } + isMultiSegmentWildcard(): boolean { + return false; + } +} + +/** @hidden */ +class MultiCaptureSegment implements PathSegment { + readonly name = "multi-capture"; + readonly trimmed: string; + constructor(readonly value: string) { + this.trimmed = trimParam(value); + } + isSingleSegmentWildcard(): boolean { + return false; + } + isMultiSegmentWildcard(): boolean { + return true; + } +} + +/** + * Implements Eventarc's path pattern from the spec https://cloud.google.com/eventarc/docs/path-patterns + * @internal + */ +export class PathPattern { + /** @throws on validation error */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + static compile(rawPath: string) { + return undefined; + } + private segments: PathSegment[]; + + constructor(private raw: string) { + this.segments = []; + this.initPathSegments(raw); + } + + getValue(): string { + return this.raw; + } + + // If false, we don't need to use pathPattern as our eventarc match type. + hasWildcards(): boolean { + return this.segments.some( + (segment) => segment.isSingleSegmentWildcard() || segment.isMultiSegmentWildcard() + ); + } + + hasCaptures(): boolean { + return this.segments.some( + (segment) => segment.name === "single-capture" || segment.name === "multi-capture" + ); + } + + extractMatches(path: string): Record { + const matches: Record = {}; + if (!this.hasCaptures()) { + return matches; + } + const pathSegments = pathParts(path); + let pathNdx = 0; + + for ( + let segmentNdx = 0; + segmentNdx < this.segments.length && pathNdx < pathSegments.length; + segmentNdx++ + ) { + const segment = this.segments[segmentNdx]; + const remainingSegments = this.segments.length - 1 - segmentNdx; + const nextPathNdx = pathSegments.length - remainingSegments; + if (segment.name === "single-capture") { + matches[segment.trimmed] = pathSegments[pathNdx]; + } else if (segment.name === "multi-capture") { + matches[segment.trimmed] = pathSegments.slice(pathNdx, nextPathNdx).join("/"); + } + pathNdx = segment.isMultiSegmentWildcard() ? 
nextPathNdx : pathNdx + 1; + } + + return matches; + } + + private initPathSegments(raw: string) { + const parts = pathParts(raw); + for (const part of parts) { + let segment: PathSegment; + const capture = part.match(WILDCARD_CAPTURE_REGEX); + if (capture && capture.length === 1) { + segment = part.includes("**") + ? new MultiCaptureSegment(part) + : new SingleCaptureSegment(part); + } else { + segment = new Segment(part); + } + this.segments.push(segment); + } + } +} diff --git a/src/common/utilities/path.ts b/src/common/utilities/path.ts new file mode 100644 index 000000000..6b56450b6 --- /dev/null +++ b/src/common/utilities/path.ts @@ -0,0 +1,33 @@ +/** @hidden + * Removes leading and trailing slashes from a path. + * + * @param path A path to normalize, in POSIX format. + */ +export function normalizePath(path: string): string { + if (!path) { + return ""; + } + return path.replace(/^\//, "").replace(/\/$/, ""); +} + +/** + * Normalizes a given path and splits it into an array of segments. + * + * @param path A path to split, in POSIX format. + */ +export function pathParts(path: string): string[] { + if (!path || path === "" || path === "/") { + return []; + } + return normalizePath(path).split("/"); +} + +/** + * Normalizes given paths and joins these together using a POSIX separator. + * + * @param base A first path segment, in POSIX format. + * @param child A second path segment, in POSIX format. + */ +export function joinPath(base: string, child: string) { + return pathParts(base).concat(pathParts(child)).join("/"); +} diff --git a/src/common/utilities/utils.ts b/src/common/utilities/utils.ts new file mode 100644 index 000000000..0bcfc1ad1 --- /dev/null +++ b/src/common/utilities/utils.ts @@ -0,0 +1,52 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
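// A short sketch (not part of this change) showing how PathPattern and the path helpers above
// fit together; the pattern and path are made-up Firestore-style values.
import { PathPattern } from "./path-pattern";
import { joinPath } from "./path";

const pattern = new PathPattern("users/{uid}/messages/{messageId=**}");
const hasCaptures = pattern.hasCaptures(); // true: {uid} and {messageId=**} are capture segments
const matches = pattern.extractMatches("users/alice/messages/2024/03/greeting");
// matches => { uid: "alice", messageId: "2024/03/greeting" }
const joined = joinPath("users/", "/alice"); // "users/alice": extra slashes are normalized away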
+ +function isObject(obj: any): boolean { + return typeof obj === "object" && !!obj; +} + +/** @hidden */ +export function applyChange(src: any, dest: any) { + // if not mergeable, don't merge + if (!isObject(dest) || !isObject(src)) { + return dest; + } + + return merge(src, dest); +} + +function merge(src: Record, dest: Record): Record { + const res: Record = {}; + const keys = new Set([...Object.keys(src), ...Object.keys(dest)]); + + for (const key of keys.values()) { + if (key in dest) { + if (dest[key] === null) { + continue; + } + res[key] = applyChange(src[key], dest[key]); + } else if (src[key] !== null) { + res[key] = src[key]; + } + } + return res; +} diff --git a/src/config.ts b/src/config.ts deleted file mode 100644 index a1cfc7090..000000000 --- a/src/config.ts +++ /dev/null @@ -1,98 +0,0 @@ - -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import * as firebase from 'firebase-admin'; - -export function config(): config.Config { - if (typeof config.singleton === 'undefined') { - init(); - } - return config.singleton; -} - -export namespace config { - // Config type is usable as a object (dot notation allowed), and firebase - // property will also code complete. - export type Config = { [key: string]: any }; - - /** @internal */ - export let singleton: config.Config; -} - -/* @internal */ -export function firebaseConfig(): firebase.AppOptions | null { - - // The FIREBASE_PROJECT environment variable was introduced to help local emulation with `firebase-tools` 3.18 - // Unfortunately, API review decided that the name should be FIREBASE_CONFIG to avoid confusions that Firebase has - // a separate project from Google Cloud. This accepts both versions, preferring the documented name. - const env = process.env.FIREBASE_CONFIG || process.env.FIREBASE_PROJECT; - if (env) { - return JSON.parse(env); - } - - // Could have Runtime Config with Firebase in it as an ENV value. - try { - const config = JSON.parse(process.env.CLOUD_RUNTIME_CONFIG); - if (config.firebase) { - return config.firebase; - } - } catch (e) { - // Do nothing - } - - // Could have Runtime Config with Firebase in it as an ENV location or default. 
- try { - const path = process.env.CLOUD_RUNTIME_CONFIG || '../../../.runtimeconfig.json'; - const config = require(path); - if (config.firebase) { - return config.firebase; - } - } catch (e) { - // Do nothing - } - - return null; -} - -function init() { - try { - const parsed = JSON.parse(process.env.CLOUD_RUNTIME_CONFIG); - delete parsed.firebase; - config.singleton = parsed; - return; - } catch (e) { - // Do nothing - } - - try { - let path = process.env.CLOUD_RUNTIME_CONFIG || '../../../.runtimeconfig.json'; - const parsed = require(path); - delete parsed.firebase; - config.singleton = parsed; - return; - } catch (e) { - // Do nothing - } - - config.singleton = {}; -} diff --git a/src/function-configuration.ts b/src/function-configuration.ts new file mode 100644 index 000000000..e69de29bb diff --git a/src/index.ts b/src/index.ts deleted file mode 100644 index f5ebbb6d7..000000000 --- a/src/index.ts +++ /dev/null @@ -1,57 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -// Providers: -import * as analytics from './providers/analytics'; -import * as auth from './providers/auth'; -import * as crashlytics from './providers/crashlytics'; -import * as database from './providers/database'; -import * as firestore from './providers/firestore'; -import * as https from './providers/https'; -import * as pubsub from './providers/pubsub'; -import * as storage from './providers/storage'; -import { firebaseConfig } from './config'; -export { analytics, auth, crashlytics, database, firestore, https, pubsub, storage }; - -// Exported root types: -export * from './config'; -export * from './cloud-functions'; - -// TEMPORARY WORKAROUND (BUG 63586213): -// Until the Cloud Functions builder can publish FIREBASE_CONFIG, automatically provide it on import based on what -// we can deduce. -if (!process.env.FIREBASE_CONFIG) { - const cfg = firebaseConfig(); - if (cfg) { - process.env.FIREBASE_CONFIG = JSON.stringify(cfg); - - } else if (process.env.GCLOUD_PROJECT) { - console.warn('Warning, estimating Firebase Config based on GCLOUD_PROJECT. Intializing firebase-admin may fail'); - process.env.FIREBASE_CONFIG = JSON.stringify({ - databaseURL: `https://${process.env.GCLOUD_PROJECT}.firebaseio.com`, - storageBucket: `${process.env.GCLOUD_PROJECT}.appspot.com`, - projectId: process.env.GCLOUD_PROJECT, - }); - } else { - console.warn('Warning, FIREBASE_CONFIG environment variable is missing. 
Initializing firebase-admin will fail'); - } -} diff --git a/src/logger/common.ts b/src/logger/common.ts new file mode 100644 index 000000000..32ef0e596 --- /dev/null +++ b/src/logger/common.ts @@ -0,0 +1,46 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// Map LogSeverity types to their equivalent `console.*` method. +/** @hidden */ +export const CONSOLE_SEVERITY: { + [severity: string]: "debug" | "info" | "warn" | "error"; +} = { + DEBUG: "debug", + INFO: "info", + NOTICE: "info", + WARNING: "warn", + ERROR: "error", + CRITICAL: "error", + ALERT: "error", + EMERGENCY: "error", +}; + +// safely preserve unpatched console.* methods in case of compat require +/** @hidden */ +export const UNPATCHED_CONSOLE = { + debug: console.debug, + info: console.info, + log: console.log, + warn: console.warn, + error: console.error, +}; diff --git a/src/logger/compat.ts b/src/logger/compat.ts new file mode 100644 index 000000000..02b819ddc --- /dev/null +++ b/src/logger/compat.ts @@ -0,0 +1,43 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
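The CONSOLE_SEVERITY map in logger/common.ts above collapses the eight Cloud Logging severities onto four console methods, and UNPATCHED_CONSOLE snapshots those methods before the compat shim below monkeypatches the globals, so structured entries still reach the original stdout/stderr writers. A minimal sketch of the routing (the message text is illustrative; the relative import assumes this module layout):

import { CONSOLE_SEVERITY, UNPATCHED_CONSOLE } from "./common";

// "NOTICE" routes to console.info; "CRITICAL", "ALERT", and "EMERGENCY" route to console.error.
UNPATCHED_CONSOLE[CONSOLE_SEVERITY["NOTICE"]](
  JSON.stringify({ severity: "NOTICE", message: "rollout complete" })
);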
+ +import { format } from "util"; +import { CONSOLE_SEVERITY, UNPATCHED_CONSOLE } from "./common"; + +/** @hidden */ +function patchedConsole(severity: string): (data: any, ...args: any[]) => void { + return function (data: any, ...args: any[]): void { + let message = format(data, ...args); + if (severity === "ERROR") { + message = new Error(message).stack || message; + } + + UNPATCHED_CONSOLE[CONSOLE_SEVERITY[severity]](JSON.stringify({ severity, message })); + }; +} + +// IMPORTANT -- "../logger" must be imported before monkeypatching! +console.debug = patchedConsole("DEBUG"); +console.info = patchedConsole("INFO"); +console.log = patchedConsole("INFO"); +console.warn = patchedConsole("WARNING"); +console.error = patchedConsole("ERROR"); diff --git a/src/logger/index.ts b/src/logger/index.ts new file mode 100644 index 000000000..585da4c20 --- /dev/null +++ b/src/logger/index.ts @@ -0,0 +1,194 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { format } from "util"; +import { traceContext } from "../common/trace"; + +import { CONSOLE_SEVERITY, UNPATCHED_CONSOLE } from "./common"; + +/** + * `LogSeverity` indicates the detailed severity of the log entry. See [LogSeverity](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity). + * @public + */ +export type LogSeverity = + | "DEBUG" + | "INFO" + | "NOTICE" + | "WARNING" + | "ERROR" + | "CRITICAL" + | "ALERT" + | "EMERGENCY"; + +/** + * `LogEntry` represents a [structured Cloud Logging](https://cloud.google.com/logging/docs/structured-logging) + * entry. All keys aside from `severity` and `message` are + * included in the `jsonPayload` of the logged entry. + * @public + */ +export interface LogEntry { + severity: LogSeverity; + message?: string; + [key: string]: any; +} + +/** @internal */ +function removeCircular(obj: any, refs: Set = new Set()): any { + if (typeof obj !== "object" || !obj) { + return obj; + } + // If the object defines its own toJSON, prefer that. 
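  // (For example, a Date instance is replaced by the ISO string produced by Date.prototype.toJSON.)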
+ if (obj.toJSON && typeof obj.toJSON === "function") { + return obj.toJSON(); + } + if (refs.has(obj)) { + return "[Circular]"; + } else { + refs.add(obj); + } + let returnObj: any; + if (Array.isArray(obj)) { + returnObj = new Array(obj.length); + } else { + returnObj = {}; + } + for (const k in obj) { + if (obj.hasOwnProperty(k)) { + try { + if (refs.has(obj[k])) { + returnObj[k] = "[Circular]"; + } else { + returnObj[k] = removeCircular(obj[k], refs); + } + } catch { + returnObj[k] = "[Error - cannot serialize]"; + } + } else { + returnObj[k] = "[Error - defined in the prototype but missing in the object]"; + } + } + refs.delete(obj); + return returnObj; +} + +/** + * Writes a `LogEntry` to `stdout`/`stderr` (depending on severity). + * @param entry - The `LogEntry` including severity, message, and any additional structured metadata. + * @public + */ +export function write(entry: LogEntry) { + const ctx = traceContext.getStore(); + if (ctx?.traceId) { + entry[ + "logging.googleapis.com/trace" + ] = `projects/${process.env.GCLOUD_PROJECT}/traces/${ctx.traceId}`; + } + + UNPATCHED_CONSOLE[CONSOLE_SEVERITY[entry.severity]](JSON.stringify(removeCircular(entry))); +} + +/** + * Writes a `DEBUG` severity log. If the last argument provided is a plain object, + * it is added to the `jsonPayload` in the Cloud Logging entry. + * @param args - Arguments, concatenated into the log message with space separators. + * @public + */ +export function debug(...args: any[]) { + write(entryFromArgs("DEBUG", args)); +} + +/** + * Writes an `INFO` severity log. If the last argument provided is a plain object, + * it is added to the `jsonPayload` in the Cloud Logging entry. + * @param args - Arguments, concatenated into the log message with space separators. + * @public + */ +export function log(...args: any[]) { + write(entryFromArgs("INFO", args)); +} + +/** + * Writes an `INFO` severity log. If the last argument provided is a plain object, + * it is added to the `jsonPayload` in the Cloud Logging entry. + * @param args - Arguments, concatenated into the log message with space separators. + * @public + */ +export function info(...args: any[]) { + write(entryFromArgs("INFO", args)); +} + +/** + * Writes a `WARNING` severity log. If the last argument provided is a plain object, + * it is added to the `jsonPayload` in the Cloud Logging entry. + * @param args - Arguments, concatenated into the log message with space separators. + * @public + */ +export function warn(...args: any[]) { + write(entryFromArgs("WARNING", args)); +} + +/** + * Writes an `ERROR` severity log. If the last argument provided is a plain object, + * it is added to the `jsonPayload` in the Cloud Logging entry. + * @param args - Arguments, concatenated into the log message with space separators. 
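 *
 * @example
 * // Illustrative call; the trailing plain object is merged into the entry's jsonPayload:
 * logger.error("failed to charge customer", { orderId: "order_123", retryable: false });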
+ * @public + */ +export function error(...args: any[]) { + write(entryFromArgs("ERROR", args)); +} + +/** @hidden */ +function entryFromArgs(severity: LogSeverity, args: any[]): LogEntry { + let entry = {}; + const lastArg = args[args.length - 1]; + if (lastArg && typeof lastArg === "object" && lastArg.constructor === Object) { + entry = args.pop(); + } + + // mimic `console.*` behavior, see https://nodejs.org/api/console.html#console_console_log_data_args + let message = format(...args); + if (severity === "ERROR" && !args.find((arg) => arg instanceof Error)) { + message = new Error(message).stack || message; + } + const out: LogEntry = { + ...entry, + severity, + }; + if (message) { + out.message = message; + } + return out; +} + +/** + * Logger object containing all logging methods. + * + * Mockable for testing purposes. + */ +export const logger = { + write, + debug, + log, + info, + warn, + error, +}; diff --git a/src/params/index.ts b/src/params/index.ts new file mode 100644 index 000000000..cde9ecf3c --- /dev/null +++ b/src/params/index.ts @@ -0,0 +1,204 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * @hidden + * @alpha + */ + +import { + BooleanParam, + Expression, + FloatParam, + IntParam, + Param, + ParamOptions, + SecretParam, + JsonSecretParam, + StringParam, + ListParam, + InternalExpression, +} from "./types"; + +export { BUCKET_PICKER, select, multiSelect } from "./types"; +export type { TextInput, SelectInput, SelectOptions, MultiSelectInput } from "./types"; + +export { Expression }; +export type { ParamOptions }; + +type SecretOrExpr = Param | SecretParam | JsonSecretParam; +export const declaredParams: SecretOrExpr[] = []; + +/** + * Use a helper to manage the list such that parameters are uniquely + * registered once only but order is preserved. + * @internal + */ +function registerParam(param: SecretOrExpr) { + for (let i = 0; i < declaredParams.length; i++) { + if (declaredParams[i].name === param.name) { + declaredParams.splice(i, 1); + } + } + declaredParams.push(param); +} + +/** + * For testing. + * @internal + */ +export function clearParams() { + declaredParams.splice(0, declaredParams.length); +} + +/** + * A built-in parameter that resolves to the default RTDB database URL associated + * with the project, without prompting the deployer. Empty string if none exists. 
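 *
 * @example
 * // Illustrative: resolved from FIREBASE_CONFIG at runtime; empty string if the project has no default database.
 * const url = databaseURL.value();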
+ */ +export const databaseURL: Param = new InternalExpression( + "DATABASE_URL", + (env: NodeJS.ProcessEnv) => JSON.parse(env.FIREBASE_CONFIG)?.databaseURL || "" +); +/** + * A built-in parameter that resolves to the Cloud project ID associated with + * the project, without prompting the deployer. + */ +export const projectID: Param = new InternalExpression( + "PROJECT_ID", + (env: NodeJS.ProcessEnv) => JSON.parse(env.FIREBASE_CONFIG)?.projectId || "" +); +/** + * A built-in parameter that resolves to the Cloud project ID, without prompting + * the deployer. + */ +export const gcloudProject: Param = new InternalExpression( + "GCLOUD_PROJECT", + (env: NodeJS.ProcessEnv) => JSON.parse(env.FIREBASE_CONFIG)?.projectId || "" +); +/** + * A builtin parameter that resolves to the Cloud storage bucket associated + * with the function, without prompting the deployer. Empty string if not + * defined. + */ +export const storageBucket: Param = new InternalExpression( + "STORAGE_BUCKET", + (env: NodeJS.ProcessEnv) => JSON.parse(env.FIREBASE_CONFIG)?.storageBucket || "" +); + +/** + * Declares a secret param, that will persist values only in Cloud Secret Manager. + * Secrets are stored internally as bytestrings. Use `ParamOptions.as` to provide type + * hinting during parameter resolution. + * + * @param name The name of the environment variable to use to load the parameter. + * @returns A parameter with a `string` return type for `.value`. + */ +export function defineSecret(name: string): SecretParam { + const param = new SecretParam(name); + registerParam(param); + return param; +} + +/** + * Declares a secret parameter that retrieves a structured JSON object in Cloud Secret Manager. + * This is useful for managing groups of related configuration values, such as all settings + * for a third-party API, as a single unit. + * + * The secret value must be a valid JSON string. At runtime, the value will be automatically parsed + * and returned as a JavaScript object. If the value is not set or is not valid JSON, an error will be thrown. + * + * @param name The name of the environment variable to use to load the parameter. + * @returns A parameter whose `.value()` method returns the parsed JSON object. + * ``` + */ +export function defineJsonSecret(name: string): JsonSecretParam { + const param = new JsonSecretParam(name); + registerParam(param); + return param; +} + +/** + * Declare a string parameter. + * + * @param name The name of the environment variable to use to load the parameter. + * @param options Configuration options for the parameter. + * @returns A parameter with a `string` return type for `.value`. + */ +export function defineString(name: string, options: ParamOptions = {}): StringParam { + const param = new StringParam(name, options); + registerParam(param); + return param; +} + +/** + * Declare a boolean parameter. + * + * @param name The name of the environment variable to use to load the parameter. + * @param options Configuration options for the parameter. + * @returns A parameter with a `boolean` return type for `.value`. + */ +export function defineBoolean(name: string, options: ParamOptions = {}): BooleanParam { + const param = new BooleanParam(name, options); + registerParam(param); + return param; +} + +/** + * Declare an integer parameter. + * + * @param name The name of the environment variable to use to load the parameter. + * @param options Configuration options for the parameter. + * @returns A parameter with a `number` return type for `.value`. 
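 *
 * @example
 * // Illustrative parameter name and default:
 * const minInstances = defineInt("MIN_INSTANCES", { default: 0 });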
+ */ +export function defineInt(name: string, options: ParamOptions = {}): IntParam { + const param = new IntParam(name, options); + registerParam(param); + return param; +} + +/** + * Declare a float parameter. + * + * @param name The name of the environment variable to use to load the parameter. + * @param options Configuration options for the parameter. + * @returns A parameter with a `number` return type for `.value`. + * + * @internal + */ +export function defineFloat(name: string, options: ParamOptions = {}): FloatParam { + const param = new FloatParam(name, options); + registerParam(param); + return param; +} + +/** + * Declare a list parameter. + * + * @param name The name of the environment variable to use to load the parameter. + * @param options Configuration options for the parameter. + * @returns A parameter with a `string[]` return type for `.value`. + */ +export function defineList(name: string, options: ParamOptions = {}): ListParam { + const param = new ListParam(name, options); + registerParam(param); + return param; +} diff --git a/src/params/types.ts b/src/params/types.ts new file mode 100644 index 000000000..e937e2e33 --- /dev/null +++ b/src/params/types.ts @@ -0,0 +1,646 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as logger from "../logger"; + +/* + * A CEL expression which can be evaluated during function deployment, and + * resolved to a value of the generic type parameter: i.e, you can pass + * an Expression as the value of an option that normally accepts numbers. + */ +export abstract class Expression { + /** Returns the expression's runtime value, based on the CLI's resolution of parameters. */ + value(): T { + if (process.env.FUNCTIONS_CONTROL_API === "true") { + logger.warn( + `${this.toString()}.value() invoked during function deployment, instead of during runtime.` + ); + logger.warn( + `This is usually a mistake. In configs, use Params directly without calling .value().` + ); + logger.warn(`example: { memory: memoryParam } not { memory: memoryParam.value() }`); + } + return this.runtimeValue(); + } + + /** @internal */ + runtimeValue(): T { + throw new Error("Not implemented"); + } + + /** Returns the expression's representation as a braced CEL expression. */ + toCEL(): string { + return `{{ ${this.toString()} }}`; + } + + /** Returns the expression's representation as JSON. 
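 * This is the same string returned by toString(); for a Param named MIN_INSTANCES,
 * for example, that is "params.MIN_INSTANCES".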
*/ + toJSON(): string { + return this.toString(); + } +} + +function valueOf(arg: T | Expression): T { + return arg instanceof Expression ? arg.runtimeValue() : arg; +} +/** + * Returns how an entity (either an `Expression` or a literal value) should be represented in CEL. + * - Expressions delegate to the `.toString()` method, which is used by the WireManifest + * - Strings have to be quoted explicitly + * - Arrays are represented as []-delimited, parsable JSON + * - Numbers and booleans are not quoted explicitly + */ +function refOf(arg: T | Expression): string { + if (arg instanceof Expression) { + return arg.toString(); + } else if (typeof arg === "string") { + return `"${arg}"`; + } else if (Array.isArray(arg)) { + return JSON.stringify(arg); + } else { + return arg.toString(); + } +} + +/** + * A CEL expression corresponding to a ternary operator, e.g {{ cond ? ifTrue : ifFalse }} + */ +export class TernaryExpression< + T extends string | number | boolean | string[] +> extends Expression { + constructor( + private readonly test: Expression, + private readonly ifTrue: T | Expression, + private readonly ifFalse: T | Expression + ) { + super(); + this.ifTrue = ifTrue; + this.ifFalse = ifFalse; + } + + /** @internal */ + runtimeValue(): T { + return this.test.runtimeValue() ? valueOf(this.ifTrue) : valueOf(this.ifFalse); + } + + toString() { + return `${this.test} ? ${refOf(this.ifTrue)} : ${refOf(this.ifFalse)}`; + } +} + +/** + * A CEL expression that evaluates to boolean true or false based on a comparison + * between the value of another expression and a literal of that same type. + */ +export class CompareExpression< + T extends string | number | boolean | string[] +> extends Expression { + cmp: "==" | "!=" | ">" | ">=" | "<" | "<="; + lhs: Expression; + rhs: T | Expression; + + constructor( + cmp: "==" | "!=" | ">" | ">=" | "<" | "<=", + lhs: Expression, + rhs: T | Expression + ) { + super(); + this.cmp = cmp; + this.lhs = lhs; + this.rhs = rhs; + } + + /** @internal */ + runtimeValue(): boolean { + const left = this.lhs.runtimeValue(); + const right = valueOf(this.rhs); + switch (this.cmp) { + case "==": + return Array.isArray(left) ? this.arrayEquals(left, right as string[]) : left === right; + case "!=": + return Array.isArray(left) ? !this.arrayEquals(left, right as string[]) : left !== right; + case ">": + return left > right; + case ">=": + return left >= right; + case "<": + return left < right; + case "<=": + return left <= right; + default: + throw new Error(`Unknown comparator ${this.cmp}`); + } + } + + /** @internal */ + arrayEquals(a: string[], b: string[]): boolean { + return a.every((item) => b.includes(item)) && b.every((item) => a.includes(item)); + } + + toString() { + const rhsStr = refOf(this.rhs); + return `${this.lhs} ${this.cmp} ${rhsStr}`; + } + + /** Returns a `TernaryExpression` which can resolve to one of two values, based on the resolution of this comparison. */ + thenElse( + ifTrue: retT | Expression, + ifFalse: retT | Expression + ) { + return new TernaryExpression(this, ifTrue, ifFalse); + } +} + +/** @hidden */ +type ParamValueType = "string" | "list" | "boolean" | "int" | "float" | "secret"; + +/** Create a select input from a series of values. */ +export function select(options: T[]): SelectInput; + +/** Create a select input from a map of labels to values. 
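 *
 * @example
 * // Illustrative: keys are the labels shown to the deployer, values are what the parameter resolves to.
 * select({ "Small (256MB)": 256, "Large (1GB)": 1024 })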
*/ +export function select(optionsWithLabels: Record): SelectInput; + +/** Create a select input from a series of values or a map of labels to values */ +export function select(options: T[] | Record): SelectInput { + let wireOpts: SelectOptions[]; + if (Array.isArray(options)) { + wireOpts = options.map((opt) => ({ value: opt })); + } else { + wireOpts = Object.entries(options).map(([label, value]) => ({ label, value })); + } + return { + select: { + options: wireOpts, + }, + }; +} + +/** Create a multi-select input from a series of values. */ +export function multiSelect(options: string[]): MultiSelectInput; + +/** Create a multi-select input from map of labels to values. */ +export function multiSelect(options: Record): MultiSelectInput; + +/** Create a multi-select input from a series of values or map of labels to values. */ +export function multiSelect(options: string[] | Record): MultiSelectInput { + let wireOpts: SelectOptions[]; + if (Array.isArray(options)) { + wireOpts = options.map((opt) => ({ value: opt })); + } else { + wireOpts = Object.entries(options).map(([label, value]) => ({ label, value })); + } + return { + multiSelect: { + options: wireOpts, + }, + }; +} + +type ParamInput = + | TextInput + | SelectInput + | (T extends string[] ? MultiSelectInput : never) + | (T extends string ? ResourceInput : never); + +/** + * Specifies that a parameter's value should be determined by prompting the user + * to type it in interactively at deploy time. Input that does not match the + * provided validationRegex, if present, will be retried. + */ +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export interface TextInput { + text: { + example?: string; + /** + * A regular expression (or an escaped string to compile into a regular + * expression) which the prompted text must satisfy; the prompt will retry + * until input matching the regex is provided. + */ + validationRegex?: string | RegExp; + /** + * A custom error message to display when retrying the prompt based on input + * failing to conform to the validationRegex, + */ + validationErrorMessage?: string; + }; +} + +/** + * Specifies that a parameter's value should be determined by having the user + * select from a list containing all the project's resources of a certain + * type. Currently, only type:"storage.googleapis.com/Bucket" is supported. + */ +export interface ResourceInput { + resource: { + type: "storage.googleapis.com/Bucket"; + }; +} + +/** + * Autogenerate a list of buckets in a project that a user can select from. + */ +export const BUCKET_PICKER: ResourceInput = { + resource: { + type: "storage.googleapis.com/Bucket", + }, +}; + +/** + * Specifies that a parameter's value should be determined by having the user select + * from a list of pre-canned options interactively at deploy time. + */ +export interface SelectInput { + select: { + options: Array>; + }; +} + +/** + * Specifies that a parameter's value should be determined by having the user select + * a subset from a list of pre-canned options interactively at deploy time. + * Will result in errors if used on parameters of type other than `string[]`. + */ +export interface MultiSelectInput { + multiSelect: { + options: Array>; + }; +} + +/** + * One of the options provided to a `SelectInput`, containing a value and + * optionally a human-readable label to display in the selection interface. + */ +export interface SelectOptions { + label?: string; + value: T; +} + +/** The wire representation of a parameter when it's sent to the CLI. 
A superset of `ParamOptions`. */ +export type ParamSpec = { + /** The name of the parameter which will be stored in .env files. Use UPPERCASE. */ + name: string; + /** An optional default value to be used while prompting for input. Can be a literal or another parametrized expression. */ + default?: T | Expression; + /** An optional human-readable string to be used as a replacement for the parameter's name when prompting. */ + label?: string; + /** An optional long-form description of the parameter to be displayed while prompting. */ + description?: string; + /** @internal */ + type: ParamValueType; + /** The way in which the Firebase CLI will prompt for the value of this parameter. Defaults to a TextInput. */ + input?: ParamInput; + /** Optional format annotation for additional type information (e.g., "json" for JSON-encoded secrets). */ + format?: string; +}; + +/** + * Representation of parameters for the stack over the wire. + * + * @remarks + * N.B: a WireParamSpec is just a ParamSpec with default expressions converted into a CEL literal + * + * @alpha + */ +export type WireParamSpec = { + name: string; + default?: T | string; + label?: string; + description?: string; + type: ParamValueType; + input?: ParamInput; + format?: string; +}; + +/** Configuration options which can be used to customize the prompting behavior of a parameter. */ +export type ParamOptions = Omit< + ParamSpec, + "name" | "type" +>; + +/** + * Represents a parametrized value that will be read from .env files if present, + * or prompted for by the CLI if missing. Instantiate these with the defineX + * methods exported by the firebase-functions/params namespace. + */ +export abstract class Param extends Expression { + static type: ParamValueType = "string"; + + constructor(readonly name: string, readonly options: ParamOptions = {}) { + super(); + } + + /** @internal */ + runtimeValue(): T { + throw new Error("Not implemented"); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. */ + cmp(cmp: "==" | "!=" | ">" | ">=" | "<" | "<=", rhs: T | Expression) { + return new CompareExpression(cmp, this, rhs); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. */ + equals(rhs: T | Expression) { + return this.cmp("==", rhs); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. */ + notEquals(rhs: T | Expression) { + return this.cmp("!=", rhs); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. */ + greaterThan(rhs: T | Expression) { + return this.cmp(">", rhs); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. */ + greaterThanOrEqualTo(rhs: T | Expression) { + return this.cmp(">=", rhs); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. */ + lessThan(rhs: T | Expression) { + return this.cmp("<", rhs); + } + + /** Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. 
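 *
 * @example
 * // Illustrative: maxInstances is an IntParam declared elsewhere with defineInt("MAX_INSTANCES").
 * maxInstances.lessThanOrEqualTo(1).thenElse("single-instance", "scaled")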
*/ + lessThanOrEqualTo(rhs: T | Expression) { + return this.cmp("<=", rhs); + } + + /** + * Returns a parametrized expression of Boolean type, based on comparing the value of this parameter to a literal or a different expression. + * @deprecated A typo. Use lessThanOrEqualTo instead. + */ + lessThanorEqualTo(rhs: T | Expression) { + return this.lessThanOrEqualTo(rhs); + } + + toString(): string { + return `params.${this.name}`; + } + + /** @internal */ + toSpec(): WireParamSpec { + const { default: paramDefault, ...otherOptions } = this.options; + + const out: WireParamSpec = { + name: this.name, + ...otherOptions, + type: (this.constructor as typeof Param).type, + }; + + if (paramDefault instanceof Expression) { + out.default = paramDefault.toCEL(); + } else if (paramDefault !== undefined) { + out.default = paramDefault; + } + + if (out.input && "text" in out.input && out.input.text.validationRegex instanceof RegExp) { + out.input.text.validationRegex = out.input.text.validationRegex.source; + } + + return out; + } +} + +/** + * A parametrized string whose value is stored in Cloud Secret Manager + * instead of the local filesystem. Supply instances of SecretParams to + * the secrets array while defining a Function to make their values accessible + * during execution of that Function. + */ +export class SecretParam { + static type: ParamValueType = "secret"; + name: string; + + constructor(name: string) { + this.name = name; + } + + /** @internal */ + runtimeValue(): string { + const val = process.env[this.name]; + if (val === undefined) { + logger.warn( + `No value found for secret parameter "${this.name}". A function can only access a secret if you include the secret in the function's dependency array.` + ); + } + return val || ""; + } + + /** @internal */ + toSpec(): ParamSpec { + return { + type: "secret", + name: this.name, + }; + } + + /** Returns the secret's value at runtime. Throws an error if accessed during deployment. */ + value(): string { + if (process.env.FUNCTIONS_CONTROL_API === "true") { + throw new Error( + `Cannot access the value of secret "${this.name}" during function deployment. Secret values are only available at runtime.` + ); + } + return this.runtimeValue(); + } +} + +/** + * A parametrized object whose value is stored as a JSON string in Cloud Secret Manager. + * This is useful for managing groups of related configuration values, such as all settings + * for a third-party API, as a single unit. Supply instances of JsonSecretParam to the + * secrets array while defining a Function to make their values accessible during execution + * of that Function. + */ +export class JsonSecretParam { + static type: ParamValueType = "secret"; + name: string; + + constructor(name: string) { + this.name = name; + } + + /** @internal */ + runtimeValue(): T { + const val = process.env[this.name]; + if (val === undefined) { + throw new Error( + `No value found for secret parameter "${this.name}". A function can only access a secret if you include the secret in the function's dependency array.` + ); + } + + try { + return JSON.parse(val) as T; + } catch (error) { + throw new Error( + `"${this.name}" could not be parsed as JSON. Please verify its value in Secret Manager. Details: ${error}` + ); + } + } + + /** @internal */ + toSpec(): ParamSpec { + return { + type: "secret", + name: this.name, + format: "json", + }; + } + + /** Returns the secret's parsed JSON value at runtime. Throws an error if accessed during deployment, if the secret is not set, or if the value is not valid JSON. 
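 *
 * @example
 * // Illustrative: apiConfig was declared elsewhere with defineJsonSecret("API_CONFIG").
 * const { apiKey, endpoint } = apiConfig.value();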
*/ + value(): T { + if (process.env.FUNCTIONS_CONTROL_API === "true") { + throw new Error( + `Cannot access the value of secret "${this.name}" during function deployment. Secret values are only available at runtime.` + ); + } + return this.runtimeValue(); + } +} + +/** + * A parametrized value of String type that will be read from .env files + * if present, or prompted for by the CLI if missing. + */ +export class StringParam extends Param { + /** @internal */ + runtimeValue(): string { + return process.env[this.name] || ""; + } +} + +/** + * A CEL expression which represents an internal Firebase variable. This class + * cannot be instantiated by developers, but we provide several canned instances + * of it to make available parameters that will never have to be defined at + * deployment time, and can always be read from process.env. + * @internal + */ +export class InternalExpression extends Param { + constructor(name: string, private readonly getter: (env: NodeJS.ProcessEnv) => string) { + super(name); + } + + /** @internal */ + runtimeValue(): string { + return this.getter(process.env) || ""; + } + + toSpec(): WireParamSpec { + throw new Error("An InternalExpression should never be marshalled for wire transmission."); + } +} + +/** + * A parametrized value of Integer type that will be read from .env files + * if present, or prompted for by the CLI if missing. + */ +export class IntParam extends Param { + static type: ParamValueType = "int"; + + /** @internal */ + runtimeValue(): number { + return parseInt(process.env[this.name] || "0", 10) || 0; + } +} + +/** + * A parametrized value of Float type that will be read from .env files + * if present, or prompted for by the CLI if missing. + */ +export class FloatParam extends Param { + static type: ParamValueType = "float"; + + /** @internal */ + runtimeValue(): number { + return parseFloat(process.env[this.name] || "0") || 0; + } +} + +/** + * A parametrized value of Boolean type that will be read from .env files + * if present, or prompted for by the CLI if missing. + */ +export class BooleanParam extends Param { + static type: ParamValueType = "boolean"; + + /** @internal */ + runtimeValue(): boolean { + return !!process.env[this.name] && process.env[this.name] === "true"; + } + + /** @deprecated */ + then(ifTrue: T | Expression, ifFalse: T | Expression) { + return this.thenElse(ifTrue, ifFalse); + } + + thenElse( + ifTrue: T | Expression, + ifFalse: T | Expression + ) { + return new TernaryExpression(this, ifTrue, ifFalse); + } +} + +/** + * A parametrized value of String[] type that will be read from .env files + * if present, or prompted for by the CLI if missing. 
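 *
 * As implemented here, the raw environment value is parsed as JSON and must be an
 * array of strings, e.g. MY_LIST='["a","b","c"]'.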
+ */ +export class ListParam extends Param { + static type: ParamValueType = "list"; + + /** @internal */ + runtimeValue(): string[] { + const val = JSON.parse(process.env[this.name]); + if (!Array.isArray(val) || !(val as string[]).every((v) => typeof v === "string")) { + return []; + } + return val as string[]; + } + + /** @hidden */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + greaterThan(rhs: string[] | Expression): CompareExpression { + throw new Error(">/< comparison operators not supported on params of type List"); + } + + /** @hidden */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + greaterThanOrEqualTo(rhs: string[] | Expression): CompareExpression { + throw new Error(">/< comparison operators not supported on params of type List"); + } + + /** @hidden */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + lessThan(rhs: string[] | Expression): CompareExpression { + throw new Error(">/< comparison operators not supported on params of type List"); + } + + /** @hidden */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + lessThanorEqualTo(rhs: string[] | Expression): CompareExpression { + throw new Error(">/< comparison operators not supported on params of type List"); + } +} diff --git a/src/providers/auth.ts b/src/providers/auth.ts deleted file mode 100644 index 813a69ab9..000000000 --- a/src/providers/auth.ts +++ /dev/null @@ -1,137 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { makeCloudFunction, CloudFunction, EventContext, LegacyEvent } from '../cloud-functions'; -import * as firebase from 'firebase-admin'; -import * as _ from 'lodash'; - -/** @internal */ -export const provider = 'google.firebase.auth'; -/** @internal */ -export const service = 'firebaseauth.googleapis.com'; - -/** Handle events in the Firebase Auth user lifecycle. */ -export function user() { - return new UserBuilder(() => { - if (!process.env.GCLOUD_PROJECT) { - throw new Error('process.env.GCLOUD_PROJECT is not set.'); - } - return 'projects/' + process.env.GCLOUD_PROJECT; - }); -} - -export class UserRecordMetadata implements firebase.auth.UserMetadata { - - constructor(public creationTime: string, public lastSignInTime: string) { }; - - /** Returns a plain JavaScript object with the properties of UserRecordMetadata. 
*/ - toJSON() { - return { - creationTime: this.creationTime, - lastSignInTime: this.lastSignInTime, - }; - } -} - -/** Builder used to create Cloud Functions for Firebase Auth user lifecycle events. */ -export class UserBuilder { - private static dataConstructor(raw: LegacyEvent): firebase.auth.UserRecord { - return userRecordConstructor(raw.data); - } - - /** @internal */ - constructor(private triggerResource: () => string) { } - - /** Respond to the creation of a Firebase Auth user. */ - onCreate(handler: (user: UserRecord, context: EventContext) => PromiseLike | any): CloudFunction { - return this.onOperation(handler, 'user.create'); - } - - /** Respond to the deletion of a Firebase Auth user. */ - onDelete(handler: (user: UserRecord, context: EventContext) => PromiseLike | any): CloudFunction { - return this.onOperation(handler, 'user.delete'); - } - - private onOperation( - handler: (user: UserRecord, context: EventContext) => PromiseLike | any, - eventType: string - ): CloudFunction { - return makeCloudFunction({ - handler, - provider, - eventType, - service, - triggerResource: this.triggerResource, - dataConstructor: UserBuilder.dataConstructor, - legacyEventType: `providers/firebase.auth/eventTypes/${eventType}`, - }); - } -} - -/** - * The UserRecord passed to Cloud Functions is the same UserRecord that is returned by the Firebase Admin - * SDK. - */ -export type UserRecord = firebase.auth.UserRecord; - -export function userRecordConstructor(wireData: Object): firebase.auth.UserRecord { - // Falsey values from the wire format proto get lost when converted to JSON, this adds them back. - let falseyValues: any = { - email: null, - emailVerified: false, - displayName: null, - photoURL: null, - phoneNumber: null, - disabled: false, - providerData: [], - customClaims: {}, - passwordSalt: null, - passwordHash: null, - tokensValidAfterTime: null, - }; - let record = _.assign({}, falseyValues, wireData); - - let meta = _.get(record, 'metadata'); - if (meta) { - _.set(record, 'metadata', new UserRecordMetadata( - // Transform payload to firebase-admin v5.0.0 format because wire format is different (BUG 63167395) - meta.createdAt || meta.creationTime, - meta.lastSignedInAt || meta.lastSignInTime, - )); - } else { - _.set(record, 'metadata', new UserRecordMetadata(null, null)); - } - _.forEach(record.providerData, entry => { - _.set(entry, 'toJSON', () => { - return entry; - }); - }); - _.set(record, 'toJSON', () => { - const json: any = _.pick(record, ['uid', 'email', 'emailVerified', 'displayName', - 'photoURL', 'phoneNumber', 'disabled', 'passwordHash', 'passwordSalt', 'tokensValidAfterTime']); - json.metadata = _.get(record, 'metadata').toJSON(); - json.customClaims = _.cloneDeep(record.customClaims); - json.providerData = _.map(record.providerData, entry => entry.toJSON()); - return json; - }); - return record as firebase.auth.UserRecord; -} diff --git a/src/providers/crashlytics.ts b/src/providers/crashlytics.ts deleted file mode 100644 index 418936e14..000000000 --- a/src/providers/crashlytics.ts +++ /dev/null @@ -1,132 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is 
-// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { makeCloudFunction, CloudFunction, EventContext } from '../cloud-functions'; - -/** @internal */ -export const provider = 'google.firebase.crashlytics'; -/** @internal */ -export const service = 'fabric.io'; - -/** - * Handle events related to Crashlytics issues. An issue in Crashlytics is an - * aggregation of crashes which have a shared root cause. - */ -export function issue() { - return new IssueBuilder(() => { - if (!process.env.GCLOUD_PROJECT) { - throw new Error('process.env.GCLOUD_PROJECT is not set.'); - } - return 'projects/' + process.env.GCLOUD_PROJECT; - }); -} - -/** Builder used to create Cloud Functions for Crashlytics issue events. */ -export class IssueBuilder { - /** @internal */ - constructor(private triggerResource: () => string) { } - - /** @internal */ - onNewDetected(handler: any): Error { - throw new Error('"onNewDetected" is now deprecated, please use "onNew"'); - } - - /** Handle Crashlytics New Issue events. */ - onNew(handler: (issue: Issue, context: EventContext) => PromiseLike | any): CloudFunction { - return this.onEvent(handler, 'issue.new'); - } - - /** Handle Crashlytics Regressed Issue events. */ - onRegressed(handler: (issue: Issue, context: EventContext) => PromiseLike | any): CloudFunction { - return this.onEvent(handler, 'issue.regressed'); - } - - /** Handle Crashlytics Velocity Alert events. */ - onVelocityAlert(handler: (issue: Issue, context: EventContext) => PromiseLike | any): CloudFunction { - return this.onEvent(handler, 'issue.velocityAlert'); - } - - private onEvent( - handler: (issue: Issue, context: EventContext) => PromiseLike | any, - eventType: string - ): CloudFunction { - return makeCloudFunction({ - handler, - provider, - eventType, - service, - legacyEventType: `providers/firebase.crashlytics/eventTypes/${eventType}`, - triggerResource: this.triggerResource, - }); - } -} - -/** - * Interface representing a Crashlytics issue event that was logged for a specific issue. - */ -export interface Issue { - /** Fabric Issue ID. */ - issueId: string; - - /** Issue title. */ - issueTitle: string; - - /** App information. */ - appInfo: AppInfo; - - /** When the issue was created (ISO8601 time stamp). */ - createTime: string; - - /** When the issue was resolved, if the issue has been resolved (ISO8601 time stamp). */ - resolvedTime?: string; - - /** Contains details about the velocity alert, if this event was triggered by a velocity alert. */ - velocityAlert?: VelocityAlert; -} - -export interface VelocityAlert { - /** The percentage of sessions which have been impacted by this issue. Example: .04 */ - crashPercentage: number; - - /** The number of crashes that this issue has caused. */ - crashes: number; -} - -/** - * Interface representing the application where this issue occurred. - */ -export interface AppInfo { - /** The app's name. 
Example: "My Awesome App". */ - appName: string; - - /** The app's platform. Examples: "android", "ios". */ - appPlatform: string; - - /** Unique application identifier within an app store, either the Android package name or the iOS bundle id. */ - appId: string; - - /** - * The latest app version which is affected by the issue. - * Examples: "1.0", "4.3.1.1.213361", "2.3 (1824253)", "v1.8b22p6". - */ - latestAppVersion: string; -} diff --git a/src/providers/database.ts b/src/providers/database.ts deleted file mode 100644 index d21ac2409..000000000 --- a/src/providers/database.ts +++ /dev/null @@ -1,359 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import * as _ from 'lodash'; -import { apps } from '../apps'; -import { LegacyEvent, CloudFunction, makeCloudFunction, Event, EventContext, Change } from '../cloud-functions'; -import { normalizePath, applyChange, pathParts, joinPath } from '../utils'; -import * as firebase from 'firebase-admin'; -import { firebaseConfig } from '../config'; - -/** @internal */ -export const provider = 'google.firebase.database'; -/** @internal */ -export const service = 'firebaseio.com'; - -// NOTE(inlined): Should we relax this a bit to allow staging or alternate implementations of our API? -const databaseURLRegex = new RegExp('https://([^.]+).firebaseio.com'); - -/** - * Pick the Realtime Database instance to use. If omitted, will pick the default database for your project. - */ -export function instance(instance: string): InstanceBuilder { - return new InstanceBuilder(instance); -} - -export class InstanceBuilder { - /* @internal */ - constructor(private instance: string) {} - - ref(path: string): RefBuilder { - const normalized = normalizePath(path); - return new RefBuilder(apps(), () => `projects/_/instances/${this.instance}/refs/${normalized}`); - } -} - -/** - * Handle events at a Firebase Realtime Database Reference. - * - * This method behaves very similarly to the method of the same name in the - * client and Admin Firebase SDKs. Any change to the Database that affects the - * data at or below the provided `path` will fire an event in Cloud Functions. - * - * There are three important differences between listening to a Realtime - * Database event in Cloud Functions and using the Realtime Database in the - * client and Admin SDKs: - * 1. Cloud Functions allows wildcards in the `path` name. 
Any `path` component - * in curly brackets (`{}`) is a wildcard that matches all strings. The value - * that matched a certain invocation of a Cloud Function is returned as part - * of the `event.params` object. For example, `ref("messages/{messageId}")` - * matches changes at `/messages/message1` or `/messages/message2`, resulting - * in `event.params.messageId` being set to `"message1"` or `"message2"`, - * respectively. - * 2. Cloud Functions do not fire an event for data that already existed before - * the Cloud Function was deployed. - * 3. Cloud Function events have access to more information, including a - * snapshot of the previous event data and information about the user who - * triggered the Cloud Function. - */ -export function ref(path: string): RefBuilder { - const resourceGetter = () => { - const normalized = normalizePath(path); - const databaseURL = firebaseConfig().databaseURL; - if (!databaseURL) { - throw new Error('Missing expected firebase config value databaseURL, ' + - 'config is actually' + JSON.stringify(firebaseConfig()) + - '\n If you are unit testing, please set process.env.FIREBASE_CONFIG'); - } - const match = databaseURL.match(databaseURLRegex); - if (!match) { - throw new Error('Invalid value for config firebase.databaseURL: ' + databaseURL); - } - const subdomain = match[1]; - return `projects/_/instances/${subdomain}/refs/${normalized}`; - }; - - return new RefBuilder(apps(), resourceGetter); -} - -/** Builder used to create Cloud Functions for Firebase Realtime Database References. */ -export class RefBuilder { - /** @internal */ - constructor(private apps: apps.Apps, private triggerResource: () => string) { } - - /** Respond to any write that affects a ref. */ - onWrite(handler: ( - change: Change, - context: EventContext) => PromiseLike | any, - ): CloudFunction> { - return this.onOperation(handler, 'ref.write', this.changeConstructor); - } - - /** Respond to update on a ref. */ - onUpdate(handler: ( - change: Change, - context: EventContext) => PromiseLike | any, - ): CloudFunction> { - return this.onOperation(handler, 'ref.update', this.changeConstructor); - } - - /** Respond to new data on a ref. */ - onCreate(handler: ( - snapshot: DataSnapshot, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - let dataConstructor = (raw: LegacyEvent) => { - let [dbInstance, path] = resourceToInstanceAndPath(raw.resource); - return new DataSnapshot( - raw.data.delta, - path, - this.apps.admin, - dbInstance - ); - }; - return this.onOperation(handler, 'ref.create', dataConstructor); - } - - /** Respond to all data being deleted from a ref. 
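Illustrative sketch of the wildcard behavior documented above, as seen from a consumer of the SDK; the function name and data shape are hypothetical:

import * as functions from "firebase-functions";

export const logMessageWrites = functions.database
  .ref("/messages/{messageId}")
  .onWrite((change, context) => {
    // The `{messageId}` wildcard is surfaced on context.params.
    console.log("messageId:", context.params.messageId);
    // change.before and change.after are DataSnapshots of the old and new values.
    return null;
  });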
*/ - onDelete(handler: ( - snapshot: DataSnapshot, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - let dataConstructor = (raw: LegacyEvent) => { - let [dbInstance, path] = resourceToInstanceAndPath(raw.resource); - return new DataSnapshot( - raw.data.data, - path, - this.apps.admin, - dbInstance - ); - }; - return this.onOperation(handler, 'ref.delete', dataConstructor); - } - - private onOperation( - handler: (data: T, context: EventContext) => PromiseLike | any, - eventType: string, - dataConstructor: (raw: Event | LegacyEvent) => any): CloudFunction { - - return makeCloudFunction({ - handler, - provider, - service, - eventType, - legacyEventType: `providers/${provider}/eventTypes/${eventType}`, - triggerResource: this.triggerResource, - dataConstructor: dataConstructor, - before: (event) => this.apps.retain(), - after: (event) => this.apps.release(), - }); - } - - private changeConstructor = (raw: LegacyEvent): Change => { - let [dbInstance, path] = resourceToInstanceAndPath(raw.resource); - let before = new DataSnapshot( - raw.data.data, - path, - this.apps.admin, - dbInstance - ); - let after = new DataSnapshot( - applyChange(raw.data.data, raw.data.delta), - path, - this.apps.admin, - dbInstance - ); - return { - before: before, - after: after, - }; - }; -} - -/* Utility function to extract database reference from resource string */ -/** @internal */ -export function resourceToInstanceAndPath(resource: string) { - let resourceRegex = `projects/([^/]+)/instances/([^/]+)/refs(/.+)?`; - let match = resource.match(new RegExp(resourceRegex)); - if (!match) { - throw new Error(`Unexpected resource string for Firebase Realtime Database event: ${resource}. ` + - 'Expected string in the format of "projects/_/instances/{firebaseioSubdomain}/refs/{ref=**}"'); - } - let [, project, dbInstanceName, path] = match; - if (project !== '_') { - throw new Error(`Expect project to be '_' in a Firebase Realtime Database event`); - } - let dbInstance = 'https://' + dbInstanceName + '.firebaseio.com'; - return [dbInstance, path]; -} - -export class DataSnapshot { - public instance: string; - private _ref: firebase.database.Reference; - private _path: string; - private _data: any; - private _childPath: string; - - constructor( - data: any, - path?: string, // path will be undefined for the database root - private app?: firebase.app.App, - instance?: string, - ) { - if (instance) { // SDK always supplies instance, but user's unit tests may not - this.instance = instance; - } else if (app) { - this.instance = app.options.databaseURL; - } else if (process.env.GCLOUD_PROJECT) { - this.instance = 'https://' + process.env.GCLOUD_PROJECT + '.firebaseio.com'; - } - - this._path = path; - this._data = data; - } - - /** Ref returns a reference to the database with full admin access. */ - get ref(): firebase.database.Reference { - if (!this.app) { // may be unpopulated in user's unit tests - throw new Error('Please supply a Firebase app in the constructor for DataSnapshot' + - ' in order to use the .ref method.'); - } - if (!this._ref) { - this._ref = this.app.database(this.instance).ref(this._fullPath()); - } - return this._ref; - } - - get key(): string { - let last = _.last(pathParts(this._fullPath())); - return (!last || last === '') ? null : last; - } - - val(): any { - let parts = pathParts(this._childPath); - let source = this._data; - let node = _.cloneDeep(parts.length ? 
_.get(source, parts, null) : source); - return this._checkAndConvertToArray(node); - } - - // TODO(inlined): figure out what to do here - exportVal(): any { return this.val(); } - - // TODO(inlined): figure out what to do here - getPriority(): string|number|null { - return 0; - } - - exists(): boolean { - return !_.isNull(this.val()); - } - - child(childPath: string): DataSnapshot { - if (!childPath) { - return this; - } - return this._dup(childPath); - } - - forEach(action: (a: DataSnapshot) => boolean): boolean { - let val = this.val(); - if (_.isPlainObject(val)) { - return _.some(val, (value, key: string) => action(this.child(key)) === true); - } - return false; - } - - hasChild(childPath: string): boolean { - return this.child(childPath).exists(); - } - - hasChildren(): boolean { - let val = this.val(); - return _.isPlainObject(val) && _.keys(val).length > 0; - } - - numChildren(): number { - let val = this.val(); - return _.isPlainObject(val) ? Object.keys(val).length : 0; - } - - /** - * Prints the value of the snapshot; use '.previous.toJSON()' and '.current.toJSON()' to explicitly see - * the previous and current values of the snapshot. - */ - toJSON(): Object { - return this.val(); - } - - /* Recursive function to check if keys are numeric & convert node object to array if they are */ - private _checkAndConvertToArray(node: any): any { - if (node === null || typeof node === 'undefined') { - return null; - } - if (typeof node !== 'object') { - return node; - } - let obj: any = {}; - let numKeys = 0; - let maxKey = 0; - let allIntegerKeys = true; - for (let key in node) { - if (!node.hasOwnProperty(key)) { continue; } - let childNode = node[key]; - obj[key] = this._checkAndConvertToArray(childNode); - numKeys++; - const integerRegExp = /^(0|[1-9]\d*)$/; - if (allIntegerKeys && integerRegExp.test(key)) { - maxKey = Math.max(maxKey, Number(key)); - } else { - allIntegerKeys = false; - } - } - - if (allIntegerKeys && maxKey < 2 * numKeys) { - // convert to array. - let array: any = []; - _.forOwn(obj, (val, key) => { - array[key] = val; - }); - - return array; - } - return obj; - } - - private _dup(childPath?: string): DataSnapshot { - let dup = new DataSnapshot(this._data, undefined, this.app, this.instance); - [dup._path, dup._childPath] = [this._path, this._childPath]; - - if (childPath) { - dup._childPath = joinPath(dup._childPath, childPath); - } - - return dup; - } - - private _fullPath(): string { - let out = (this._path || '') + '/' + (this._childPath || ''); - return out; - } -} diff --git a/src/providers/firestore.ts b/src/providers/firestore.ts deleted file mode 100644 index c9dec2795..000000000 --- a/src/providers/firestore.ts +++ /dev/null @@ -1,182 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. 
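A rough sketch of the integer-key coercion that `_checkAndConvertToArray` performs in the DataSnapshot class above; no Firebase app is needed for `val()`, and the import path is illustrative:

import { DataSnapshot } from "firebase-functions/lib/providers/database"; // illustrative path

const snap = new DataSnapshot({ 0: "a", 1: "b", 2: "c" }, "/letters");
console.log(snap.val());            // ["a", "b", "c"] – dense integer keys become an array
console.log(snap.child("1").val()); // "b"
console.log(snap.exists());         // true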
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { posix } from 'path'; -import * as _ from 'lodash'; -import * as firebase from 'firebase-admin'; -import { apps } from '../apps'; -import { makeCloudFunction, CloudFunction, LegacyEvent, Change, - Event, EventContext } from '../cloud-functions'; -import { dateToTimestampProto } from '../encoder'; - -/** @internal */ -export const provider = 'google.firestore'; -/** @internal */ -export const service = 'firestore.googleapis.com'; -export type DocumentSnapshot = firebase.firestore.DocumentSnapshot; - -/** @internal */ -export const defaultDatabase = '(default)'; -let firestoreInstance: any; - -/** @internal */ -// Multiple databases are not yet supported by Firestore. -export function database(database: string = defaultDatabase) { - return new DatabaseBuilder(database); -} - -/** @internal */ -// Multiple databases are not yet supported by Firestore. -export function namespace(namespace: string) { - return database().namespace(namespace); -} - -export function document(path: string) { - return database().document(path); -} - -export class DatabaseBuilder { - /** @internal */ - constructor(private database: string) { } - - namespace(namespace: string) { - return new NamespaceBuilder(this.database, namespace); - } - - document(path: string) { - return new NamespaceBuilder(this.database).document(path); - } -} - -export class NamespaceBuilder { - /** @internal */ - constructor(private database: string, private namespace?: string) { } - - document(path: string) { - return new DocumentBuilder(() => { - if (!process.env.GCLOUD_PROJECT) { - throw new Error('process.env.GCLOUD_PROJECT is not set.'); - } - let database = posix.join('projects', process.env.GCLOUD_PROJECT, 'databases', this.database); - return posix.join( - database, - this.namespace ? 
`documents@${this.namespace}` : 'documents', - path); - }); - } -} - -function _getValueProto(data: any, resource: string, valueFieldName: string) { - if (_.isEmpty(_.get(data, valueFieldName))) { - // Firestore#snapshot_ takes resource string instead of proto for a non-existent snapshot - return resource; - } - let proto = { - fields: _.get(data, [valueFieldName, 'fields'], {}), - createTime: dateToTimestampProto(_.get(data, [valueFieldName, 'createTime'])), - updateTime: dateToTimestampProto(_.get(data, [valueFieldName, 'updateTime'])), - name: _.get(data, [valueFieldName, 'name'], resource), - }; - return proto; -}; - -/** @internal */ -export function snapshotConstructor(event: LegacyEvent): DocumentSnapshot { - if (!firestoreInstance) { - firestoreInstance = firebase.firestore(apps().admin); - } - let valueProto = _getValueProto(event.data, event.resource, 'value'); - let readTime = dateToTimestampProto(_.get(event, 'data.value.readTime')); - return firestoreInstance.snapshot_(valueProto, readTime, 'json'); -}; - -/** @internal */ -// TODO remove this function when wire format changes to new format -export function beforeSnapshotConstructor(event: LegacyEvent): DocumentSnapshot { - if (!firestoreInstance) { - firestoreInstance = firebase.firestore(apps().admin); - } - let oldValueProto = _getValueProto(event.data, event.resource, 'oldValue'); - let oldReadTime = dateToTimestampProto(_.get(event, 'data.oldValue.readTime')); - return firestoreInstance.snapshot_(oldValueProto, oldReadTime, 'json'); -} - -function changeConstructor(raw: LegacyEvent) { - return Change.fromObjects( - beforeSnapshotConstructor(raw), - snapshotConstructor(raw) - ); -} - -export class DocumentBuilder { - /** @internal */ - constructor(private triggerResource: () => string) { - // TODO what validation do we want to do here? - } - - /** Respond to all document writes (creates, updates, or deletes). */ - onWrite(handler: ( - change: Change, - context: EventContext) => PromiseLike | any, - ): CloudFunction> { - return this.onOperation(handler, 'document.write', changeConstructor); - }; - - /** Respond only to document updates. */ - onUpdate(handler: ( - change: Change, - context: EventContext) => PromiseLike | any, - ): CloudFunction> { - return this.onOperation(handler, 'document.update', changeConstructor); - } - - /** Respond only to document creations. */ - onCreate(handler: ( - snapshot: DocumentSnapshot, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - return this.onOperation(handler, 'document.create', snapshotConstructor); - } - - /** Respond only to document deletions. 
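Illustrative consumer-side use of the DocumentBuilder defined here; the collection path and handler body are hypothetical:

import * as functions from "firebase-functions";

export const onUserCreated = functions.firestore
  .document("users/{userId}")
  .onCreate((snapshot, context) => {
    // snapshot is the DocumentSnapshot produced by snapshotConstructor above.
    console.log("created", context.params.userId, snapshot.data());
    return null;
  });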
*/ - onDelete(handler: ( - snapshot: DocumentSnapshot, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - return this.onOperation(handler, 'document.delete', beforeSnapshotConstructor); - } - - private onOperation( - handler: (data: T, context: EventContext) => PromiseLike | any, - eventType: string, - dataConstructor: (raw: Event | LegacyEvent) => any): CloudFunction { - return makeCloudFunction({ - handler, - provider, - eventType, - service, - triggerResource: this.triggerResource, - legacyEventType: `providers/cloud.firestore/eventTypes/${eventType}`, - dataConstructor, - }); - } -} diff --git a/src/providers/https.ts b/src/providers/https.ts deleted file mode 100644 index 82f5c56ba..000000000 --- a/src/providers/https.ts +++ /dev/null @@ -1,444 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { HttpsFunction } from '../cloud-functions'; -import * as express from 'express'; -import * as firebase from 'firebase-admin'; -import { apps } from '../apps'; -import * as _ from 'lodash'; -import * as cors from 'cors'; - -export function onRequest(handler: (req: express.Request, resp: express.Response) => void): HttpsFunction { - // lets us add __trigger without altering handler: - let cloudFunction: any = (req: express.Request, res: express.Response) => { handler(req, res); }; - cloudFunction.__trigger = {httpsTrigger: {}}; - - return cloudFunction; -} - -/** - * The set of Firebase Functions status codes. The codes are the same at the - * ones exposed by gRPC here: - * https://github.com/grpc/grpc/blob/master/doc/statuscodes.md - * - * Possible values: - * - 'cancelled': The operation was cancelled (typically by the caller). - * - 'unknown': Unknown error or an error from a different error domain. - * - 'invalid-argument': Client specified an invalid argument. Note that this - * differs from 'failed-precondition'. 'invalid-argument' indicates - * arguments that are problematic regardless of the state of the system - * (e.g. an invalid field name). - * - 'deadline-exceeded': Deadline expired before operation could complete. - * For operations that change the state of the system, this error may be - * returned even if the operation has completed successfully. For example, - * a successful response from a server could have been delayed long enough - * for the deadline to expire. - * - 'not-found': Some requested document was not found. 
- * - 'already-exists': Some document that we attempted to create already - * exists. - * - 'permission-denied': The caller does not have permission to execute the - * specified operation. - * - 'resource-exhausted': Some resource has been exhausted, perhaps a - * per-user quota, or perhaps the entire file system is out of space. - * - 'failed-precondition': Operation was rejected because the system is not - * in a state required for the operation's execution. - * - 'aborted': The operation was aborted, typically due to a concurrency - * issue like transaction aborts, etc. - * - 'out-of-range': Operation was attempted past the valid range. - * - 'unimplemented': Operation is not implemented or not supported/enabled. - * - 'internal': Internal errors. Means some invariants expected by - * underlying system has been broken. If you see one of these errors, - * something is very broken. - * - 'unavailable': The service is currently unavailable. This is most likely - * a transient condition and may be corrected by retrying with a backoff. - * - 'data-loss': Unrecoverable data loss or corruption. - * - 'unauthenticated': The request does not have valid authentication - * credentials for the operation. - */ -export type FunctionsErrorCode = - | 'ok' - | 'cancelled' - | 'unknown' - | 'invalid-argument' - | 'deadline-exceeded' - | 'not-found' - | 'already-exists' - | 'permission-denied' - | 'resource-exhausted' - | 'failed-precondition' - | 'aborted' - | 'out-of-range' - | 'unimplemented' - | 'internal' - | 'unavailable' - | 'data-loss' - | 'unauthenticated'; - -/** - * Standard error codes for different ways a request can fail, as defined by: - * https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto - * - * This map is used primarily to convert from a client error code string to - * to the HTTP format error code string, and make sure it's in the supported set. - */ -const errorCodeMap: { [name: string]: string } = { - 'ok': 'OK', - 'cancelled': 'CANCELLED', - 'unknown': 'UNKNOWN', - 'invalid-argument': 'INVALID_ARGUMENT', - 'deadline-exceeded': 'DEADLINE_EXCEEDED', - 'not-found': 'NOT_FOUND', - 'already-exists': 'ALREADY_EXISTS', - 'permission-denied': 'PERMISSION_DENIED', - 'unauthenticated': 'UNAUTHENTICATED', - 'resource-exhausted': 'RESOURCE_EXHAUSTED', - 'failed-precondition': 'FAILED_PRECONDITION', - 'aborted': 'ABORTED', - 'out-of-range': 'OUT_OF_RANGE', - 'unimplemented': 'UNIMPLEMENTED', - 'internal': 'INTERNAL', - 'unavailable': 'UNAVAILABLE', - 'data-loss': 'DATA_LOSS', -}; - -/** - * An explicit error that can be thrown from a handler to send an error to the - * client that called the function. - */ -export class HttpsError extends Error { - /** - * A standard error code that will be returned to the client. This also - * determines the HTTP status code of the response, as defined in code.proto. - */ - readonly code: FunctionsErrorCode; - - /** - * Extra data to be converted to JSON and included in the error response. 
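A hedged sketch of throwing HttpsError from handler code; the validation logic and field names are hypothetical:

import * as functions from "firebase-functions";

function requireText(data: any): string {
  if (typeof data.text !== "string" || data.text === "") {
    // "invalid-argument" maps to HTTP 400 via the status-code mapping below; the
    // optional third argument is returned to the caller as `details`.
    throw new functions.https.HttpsError("invalid-argument", "text must be a non-empty string", {
      field: "text",
    });
  }
  return data.text;
}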
- */ - readonly details?: any; - - constructor(code: FunctionsErrorCode, message?: string, details?: any) { - super(message); - - // This is a workaround for a bug in TypeScript when extending Error: - // tslint:disable-next-line - // https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work - Object.setPrototypeOf(this, HttpsError.prototype); - - if (!errorCodeMap[code]) { - throw new Error('Unknown error status: ' + code); - } - - this.code = code; - this.details = details; - } - - /** - * @internal - * A string representation of the Google error code for this error for HTTP. - */ - get status() { - return errorCodeMap[this.code]; - } - - /** - * @internal - * Returns the canonical http status code for the given error. - */ - get httpStatus(): number { - switch (this.code) { - case 'ok': return 200; - case 'cancelled': return 499; - case 'unknown': return 500; - case 'invalid-argument': return 400; - case 'deadline-exceeded': return 504; - case 'not-found': return 404; - case 'already-exists': return 409; - case 'permission-denied': return 403; - case 'unauthenticated': return 401; - case 'resource-exhausted': return 429; - case 'failed-precondition': return 400; - case 'aborted': return 409; - case 'out-of-range': return 400; - case 'unimplemented': return 501; - case 'internal': return 500; - case 'unavailable': return 503; - case 'data-loss': return 500; - // This should never happen as long as the type system is doing its job. - default: throw 'Invalid error code: ' + this.code; - } - } - - /** @internal */ - public toJSON() { - const json: any = { - status: this.status, - message: this.message, - }; - if (!_.isUndefined(this.details)) { - json.details = this.details; - } - return json; - } -} - -/** - * The interface for metadata for the API as passed to the handler. - */ -export interface CallableContext { - /** - * The result of decoding and verifying a Firebase Auth ID token. - */ - auth?: { - uid: string; - token: firebase.auth.DecodedIdToken; - }; - - /** - * An unverified token for a Firebase Instance ID. - */ - instanceIdToken?: string; - - /** - * The raw request handled by the callable. - */ - rawRequest: express.Request; -} - -// The allowed interface for an http request for a callable function. -interface HttpRequest extends express.Request { - body: { - data: any; - }; -}; - -// The format for the http body response to a callable function. -interface HttpResponseBody { - result?: any; - error?: HttpsError; -}; - -// Returns true if req is a properly formatted callable request. -function isValidRequest(req: express.Request): req is HttpRequest { - // The body must not be empty. - if (!req.body) { - console.warn('Request is missing body.'); - return false; - } - - // Make sure it's a POST. - if (req.method !== 'POST') { - console.warn('Request has invalid method.', req.method); - return false; - } - - // Check that the Content-Type is JSON. - let contentType = (req.header('Content-Type') || '').toLowerCase(); - // If it has a charset, just ignore it for now. - const semiColon = contentType.indexOf(';'); - if (semiColon >= 0) { - contentType = contentType.substr(0, semiColon).trim(); - } - if (contentType !== 'application/json') { - console.warn('Request has incorrect Content-Type.', contentType); - return false; - } - - // The body must have data. 
- if (_.isUndefined(req.body.data)) { - console.warn('Request body is missing data.', req.body); - return false; - } - - // TODO(klimt): Allow only whitelisted http headers. - - // Verify that the body does not have any extra fields. - const extras = _.omit(req.body, 'data'); - if (!_.isEmpty(extras)) { - console.warn('Request body has extra fields.', extras); - return false; - } - return true; -} - -const LONG_TYPE = 'type.googleapis.com/google.protobuf.Int64Value'; -const UNSIGNED_LONG_TYPE = 'type.googleapis.com/google.protobuf.UInt64Value'; - -/** - * Encodes arbitrary data in our special format for JSON. - * This is exposed only for testing. - */ -/** @internal */ -export function encode(data: any): any { - if (_.isNull(data) || _.isUndefined(data)) { - return null; - } - // Oddly, _.isFinite(new Number(x)) always returns false, so unwrap Numbers. - if (data instanceof Number) { - data = data.valueOf(); - } - if (_.isFinite(data)) { - // Any number in JS is safe to put directly in JSON and parse as a double - // without any loss of precision. - return data; - } - if (_.isBoolean(data)) { - return data; - } - if (_.isString(data)) { - return data; - } - if (_.isArray(data)) { - return _.map(data, encode); - } - if (_.isObject(data)) { - // It's not safe to use _.forEach, because the object might be 'array-like' - // if it has a key called 'length'. Note that this intentionally overrides - // any toJSON method that an object may have. - return _.mapValues(data, encode); - } - // If we got this far, the data is not encodable. - console.error('Data cannot be encoded in JSON.', data); - throw new Error('Data cannot be encoded in JSON: ' + data); -} - -/** - * Decodes our special format for JSON into native types. - * This is exposed only for testing. - */ -/** @internal */ -export function decode(data: any): any { - if (data === null) { - return data; - } - if (data['@type']) { - switch (data['@type']) { - case LONG_TYPE: - // Fall through and handle this the same as unsigned. - case UNSIGNED_LONG_TYPE: { - // Technically, this could work return a valid number for malformed - // data if there was a number followed by garbage. But it's just not - // worth all the extra code to detect that case. - const value = parseFloat(data.value); - if (_.isNaN(value)) { - console.error('Data cannot be decoded from JSON.', data); - throw new Error('Data cannot be decoded from JSON: ' + data); - } - return value; - } - default: { - console.error('Data cannot be decoded from JSON.', data); - throw new Error('Data cannot be decoded from JSON: ' + data); - } - } - } - if (_.isArray(data)) { - return _.map(data, decode); - } - if (_.isObject(data)) { - // It's not safe to use _.forEach, because the object might be 'array-like' - // if it has a key called 'length'. - return _.mapValues(data, decode); - } - // Anything else is safe to return. - return data; -} - -const corsHandler = cors({ origin: true, methods: 'POST' }); - -/** - * Declares a callable method for clients to call using a Firebase SDK. - * @param handler A method that takes a data and context and returns a value. 
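Illustrative callable handler showing how the decoded data and the CallableContext described above reach user code; the handler body is hypothetical:

import * as functions from "firebase-functions";

export const addMessage = functions.https.onCall(async (data, context) => {
  // context.auth is populated from the verified Authorization bearer token.
  if (!context.auth) {
    throw new functions.https.HttpsError("unauthenticated", "Sign in required");
  }
  // The return value is passed through encode() and sent back as { result: ... }.
  return { uid: context.auth.uid, text: String(data.text ?? "") };
});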
- */ -export function onCall( - handler: (data: any, context: CallableContext) => any | Promise): HttpsFunction { - const func = async (req: express.Request, res: express.Response) => { - try { - if (!isValidRequest(req)) { - console.error('Invalid request', req); - throw new HttpsError('invalid-argument', 'Bad Request'); - } - - const context: CallableContext = { rawRequest: req }; - - const authorization = req.header('Authorization'); - if (authorization) { - const match = authorization.match(/^Bearer (.*)$/); - if (!match) { - throw new HttpsError('unauthenticated', 'Unauthenticated'); - } - const idToken = match[1]; - try { - const authToken = await apps().admin.auth().verifyIdToken(idToken); - context.auth = { - uid: authToken.uid, - token: authToken, - }; - } catch (e) { - throw new HttpsError('unauthenticated', 'Unauthenticated'); - } - } - - const instanceId = req.header('Firebase-Instance-ID-Token'); - if (instanceId) { - // Validating the token requires an http request, so we don't do it. - // If the user wants to use it for something, it will be validated then. - // Currently, the only real use case for this token is for sending - // pushes with FCM. In that case, the FCM APIs will validate the token. - context.instanceIdToken = req.header('Firebase-Instance-ID-Token'); - } - - const data = decode(req.body.data); - let result: any = await handler(data, context); - - // Encode the result as JSON to preserve types like Dates. - result = encode(result); - - // If there was some result, encode it in the body. - const responseBody: HttpResponseBody = { result }; - res.status(200).send(responseBody); - - } catch (error) { - if (!(error instanceof HttpsError)) { - // This doesn't count as an 'explicit' error. - console.error('Unhandled error', error); - error = new HttpsError('internal', 'INTERNAL'); - } - - const status = error.httpStatus; - const body = { error: error.toJSON() }; - res.status(status).send(body); - } - }; - - // Wrap the function with a cors handler. - const corsFunc: any = (req: express.Request, res: express.Response) => { - return corsHandler(req, res, () => func(req, res)); - }; - - corsFunc.__trigger = { - httpsTrigger: {}, - labels: { 'deployment-callable': 'true' }, - }; - - return corsFunc; -} diff --git a/src/providers/pubsub.ts b/src/providers/pubsub.ts deleted file mode 100644 index c2359b73c..000000000 --- a/src/providers/pubsub.ts +++ /dev/null @@ -1,96 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { CloudFunction, makeCloudFunction, EventContext } from '../cloud-functions'; - -/** @internal */ -export const provider = 'google.pubsub'; -/** @internal */ -export const service = 'pubsub.googleapis.com'; - -/** Handle events on a Cloud Pub/Sub topic. */ -export function topic(topic: string): TopicBuilder { - if (topic.indexOf('/') !== -1) { - throw new Error('Topic name may not have a /'); - } - - return new TopicBuilder(() => { - if (!process.env.GCLOUD_PROJECT) { - throw new Error('process.env.GCLOUD_PROJECT is not set.'); - } - return `projects/${process.env.GCLOUD_PROJECT}/topics/${topic}`; - }); -} - -/** Builder used to create Cloud Functions for Google Pub/Sub topics. */ -export class TopicBuilder { - - /** @internal */ - constructor(private triggerResource: () => string) { } - - /** Handle a Pub/Sub message that was published to a Cloud Pub/Sub topic */ - onPublish(handler: (message: Message, context: EventContext) => PromiseLike | any): CloudFunction { - return makeCloudFunction({ - handler, - provider, - service, - triggerResource: this.triggerResource, - eventType: 'topic.publish', - dataConstructor: (raw) => new Message(raw.data), - }); - } -} - -/** - * A Pub/Sub message. - * - * This class has an additional .json helper which will correctly deserialize any - * message that was a JSON object when published with the JS SDK. .json will throw - * if the message is not a base64 encoded JSON string. - */ -export class Message { - readonly data: string; - readonly attributes: {[key: string]: string }; - private _json: any; - - constructor(data: any) { - [this.data, this.attributes, this._json] = - [data.data, data.attributes || {}, data.json]; - } - - get json(): any { - if (typeof this._json === 'undefined') { - this._json = JSON.parse( - new Buffer(this.data, 'base64').toString('utf8') - ); - } - - return this._json; - } - - toJSON(): any { - return { - data: this.data, - attributes: this.attributes, - }; - } -} diff --git a/src/providers/storage.ts b/src/providers/storage.ts deleted file mode 100644 index f99ef3fa7..000000000 --- a/src/providers/storage.ts +++ /dev/null @@ -1,175 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
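Illustrative Pub/Sub trigger built with the TopicBuilder and Message class above; the topic name and payload are hypothetical:

import * as functions from "firebase-functions";

export const onJobPublished = functions.pubsub
  .topic("jobs")
  .onPublish((message, context) => {
    // message.json lazily base64-decodes and JSON-parses the payload, throwing if it is not JSON.
    const payload = message.json;
    console.log("job", context.eventId, payload);
    return null;
  });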
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import { CloudFunction, EventContext, makeCloudFunction } from '../cloud-functions'; -import { firebaseConfig } from '../config'; - -/** @internal */ -export const provider = 'google.storage'; -/** @internal */ -export const service = 'storage.googleapis.com'; - -/** - * The optional bucket function allows you to choose which buckets' events to handle. - * This step can be bypassed by calling object() directly, which will use the bucket that - * the Firebase SDK for Cloud Storage uses. - */ -export function bucket(bucket?: string): BucketBuilder { - const resourceGetter = () => { - bucket = bucket || firebaseConfig().storageBucket; - if (!bucket) { - throw new Error('Missing bucket name. If you are unit testing, please provide a bucket name' + - ' through `functions.storage.bucket(bucketName)`, or set process.env.FIREBASE_CONFIG.'); - } - if (!/^[a-z\d][a-z\d\\._-]{1,230}[a-z\d]$/.test(bucket)) { - throw new Error('Invalid bucket name ${bucket}'); - } - return `projects/_/buckets/${bucket}`; - }; - return new BucketBuilder(resourceGetter); -} - -export function object(): ObjectBuilder { - return bucket().object(); -} - -export class BucketBuilder { - /** @internal */ - constructor(private triggerResource: () => string) { } - - /** Handle events for objects in this bucket. */ - object() { - return new ObjectBuilder(this.triggerResource); - } -} - -export class ObjectBuilder { - /** @internal */ - constructor(private triggerResource: () => string) { } - - /** @internal */ - onChange(handler: any): Error { - throw new Error('"onChange" is now deprecated, please use "onArchive", "onDelete", ' + - '"onFinalize", or "onMetadataUpdate".'); - } - - /** Respond to archiving of an object, this is only for buckets that enabled object versioning. */ - onArchive(handler: ( - object: ObjectMetadata, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - return this.onOperation(handler, 'object.archive'); - } - - /** Respond to the deletion of an object (not to archiving, if object versioning is enabled). */ - onDelete(handler: ( - object: ObjectMetadata, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - return this.onOperation(handler, 'object.delete'); - } - - /** Respond to the successful creation of an object. */ - onFinalize(handler: ( - object: ObjectMetadata, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - return this.onOperation(handler, 'object.finalize'); - } - - /** Respond to metadata updates of existing objects. 
*/ - onMetadataUpdate(handler: ( - object: ObjectMetadata, - context: EventContext) => PromiseLike | any, - ): CloudFunction { - return this.onOperation(handler, 'object.metadataUpdate'); - } - - private onOperation( - handler: (object: ObjectMetadata, context: EventContext) => PromiseLike | any, - eventType: string): CloudFunction { - return makeCloudFunction({ - handler, - provider, - service, - eventType, - triggerResource: this.triggerResource, - }); - } -} - -export interface ObjectMetadata { - kind: string; - id: string; - bucket: string; - storageClass: string; - size: string; - timeCreated: string; - updated: string; - selfLink?: string; - name?: string; - generation?: string; - contentType?: string; - metageneration?: string; - timeDeleted?: string; - timeStorageClassUpdated?: string; - md5Hash?: string; - mediaLink?: string; - contentEncoding?: string; - contentDisposition?: string; - contentLanguage?: string; - cacheControl?: string; - metadata?: { - [key: string]: string; - }; - acl?: [ - { - kind?: string, - id?: string, - selfLink?: string, - bucket?: string, - object?: string, - generation?: string, - entity?: string, - role?: string, - email?: string, - entityId?: string, - domain?: string, - projectTeam?: { - projectNumber?: string, - team?: string - }, - etag?: string - } - ]; - owner?: { - entity?: string, - entityId?: string - }; - crc32c?: string; - componentCount?: string; - etag?: string; - customerEncryption?: { - encryptionAlgorithm?: string, - keySha256?: string, - }; -} diff --git a/src/runtime/loader.ts b/src/runtime/loader.ts new file mode 100644 index 000000000..5c7af9553 --- /dev/null +++ b/src/runtime/loader.ts @@ -0,0 +1,207 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. +import * as path from "path"; +import * as url from "url"; + +import { + ManifestEndpoint, + ManifestExtension, + ManifestRequiredAPI, + ManifestStack, +} from "./manifest"; + +import * as params from "../params"; + +/** + * Dynamically load import function to prevent TypeScript from + * transpiling into a require. + * + * See https://github.com/microsoft/TypeScript/issues/43329. 
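Illustrative Cloud Storage trigger built with the ObjectBuilder above, using the default bucket; the handler body is hypothetical:

import * as functions from "firebase-functions";

export const onUpload = functions.storage
  .object()
  .onFinalize((object, context) => {
    // `object` implements the ObjectMetadata interface defined above.
    console.log("uploaded", object.bucket, object.name, object.contentType);
    return null;
  });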
+ * + */ +// eslint-disable-next-line @typescript-eslint/no-implied-eval +const dynamicImport = new Function("modulePath", "return import(modulePath)") as ( + modulePath: string +) => Promise; + +async function loadModule(functionsDir: string) { + const absolutePath = path.resolve(functionsDir); + try { + return require(path.resolve(absolutePath)); + } catch (e) { + if (e.code === "ERR_REQUIRE_ESM" || e.code === "ERR_REQUIRE_ASYNC_MODULE") { + // This is an ESM package, or one containing top-level awaits! + const modulePath = require.resolve(absolutePath); + // Resolve module path to file:// URL. Required for windows support. + const moduleURL = url.pathToFileURL(modulePath).href; + return await dynamicImport(moduleURL); + } + throw e; + } +} + +/* @internal */ +export function extractStack( + module, + endpoints: Record, + requiredAPIs: ManifestRequiredAPI[], + extensions: Record, + prefix = "" +) { + for (const [name, valAsUnknown] of Object.entries(module)) { + // We're introspecting untrusted code here. Any is appropraite + const val: any = valAsUnknown; + if (typeof val === "function" && val.__endpoint && typeof val.__endpoint === "object") { + const funcName = prefix + name; + endpoints[funcName] = { + ...val.__endpoint, + entryPoint: funcName.replace(/-/g, "."), + }; + if (val.__requiredAPIs && Array.isArray(val.__requiredAPIs)) { + requiredAPIs.push(...val.__requiredAPIs); + } + } else if (isFirebaseRefExtension(val)) { + extensions[val.instanceId] = { + params: convertExtensionParams(val.params), + ref: val.FIREBASE_EXTENSION_REFERENCE, + events: val.events || [], + }; + } else if (isFirebaseLocalExtension(val)) { + extensions[val.instanceId] = { + params: convertExtensionParams(val.params), + localPath: val.FIREBASE_EXTENSION_LOCAL_PATH, + events: val.events || [], + }; + } else if (isObject(val)) { + extractStack(val, endpoints, requiredAPIs, extensions, prefix + name + "-"); + } + } +} + +function toTitleCase(txt: string): string { + return txt.charAt(0).toUpperCase() + txt.substring(1).toLowerCase(); +} + +function snakeToCamelCase(txt: string): string { + let ret = txt.toLowerCase(); + ret = ret.replace(/_/g, " "); + ret = ret.replace(/\w\S*/g, toTitleCase); + ret = ret.charAt(0).toLowerCase() + ret.substring(1); + return ret; +} + +function convertExtensionParams(params: object): Record { + const systemPrefixes: Record = { + FUNCTION: "firebaseextensions.v1beta.function", + V2FUNCTION: "firebaseextensions.v1beta.v2function", + }; + const converted: Record = {}; + for (const [rawKey, paramVal] of Object.entries(params)) { + let key = rawKey; + if (rawKey.startsWith("_") && rawKey !== "_EVENT_ARC_REGION") { + const prefix = rawKey.substring(1).split("_")[0]; + const suffix = rawKey.substring(2 + prefix.length); // 2 for underscores + key = `${systemPrefixes[prefix]}/${snakeToCamelCase(suffix)}`; + } + if (Array.isArray(paramVal)) { + converted[key] = paramVal.join(","); + } else { + converted[key] = paramVal as string; + } + } + return converted; +} + +function isObject(value: unknown): value is Record { + return typeof value === "object" && value !== null; +} + +interface FirebaseLocalExtension { + FIREBASE_EXTENSION_LOCAL_PATH: string; + instanceId: string; + params: Record; + events?: string[]; +} + +const isFirebaseLocalExtension = (val: unknown): val is FirebaseLocalExtension => { + return ( + isObject(val) && + typeof val.FIREBASE_EXTENSION_LOCAL_PATH === "string" && + typeof val.instanceId === "string" && + isObject(val.params) && + (!val.events || 
Array.isArray(val.events)) + ); +}; + +interface FirebaseRefExtension { + FIREBASE_EXTENSION_REFERENCE: string; + instanceId: string; + params: Record; + events?: string[]; +} + +const isFirebaseRefExtension = (val: unknown): val is FirebaseRefExtension => { + return ( + isObject(val) && + typeof val.FIREBASE_EXTENSION_REFERENCE === "string" && + typeof val.instanceId === "string" && + isObject(val.params) && + (!val.events || Array.isArray(val.events)) + ); +}; + +/* @internal */ +export function mergeRequiredAPIs(requiredAPIs: ManifestRequiredAPI[]): ManifestRequiredAPI[] { + const apiToReasons: Record> = {}; + for (const { api, reason } of requiredAPIs) { + const reasons = apiToReasons[api] || new Set(); + reasons.add(reason); + apiToReasons[api] = reasons; + } + + const merged: ManifestRequiredAPI[] = []; + for (const [api, reasons] of Object.entries(apiToReasons)) { + merged.push({ api, reason: Array.from(reasons).join(" ") }); + } + return merged; +} + +/* @internal */ +export async function loadStack(functionsDir: string): Promise { + const endpoints: Record = {}; + const requiredAPIs: ManifestRequiredAPI[] = []; + const extensions: Record = {}; + const mod = await loadModule(functionsDir); + + extractStack(mod, endpoints, requiredAPIs, extensions); + + const stack: ManifestStack = { + endpoints, + specVersion: "v1alpha1", + requiredAPIs: mergeRequiredAPIs(requiredAPIs), + extensions, + }; + if (params.declaredParams.length > 0) { + stack.params = params.declaredParams.map((p) => p.toSpec()); + } + return stack; +} diff --git a/src/runtime/manifest.ts b/src/runtime/manifest.ts new file mode 100644 index 000000000..4d52d5eaf --- /dev/null +++ b/src/runtime/manifest.ts @@ -0,0 +1,306 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { RESET_VALUE, ResettableKeys, ResetValue } from "../common/options"; +import { Expression } from "../params"; +import { WireParamSpec, SecretParam } from "../params/types"; + +/** + * A definition of an extension as appears in the Manifest. + * Exactly one of ref or localPath must be present. + */ +export interface ManifestExtension { + params: Record; + ref?: string; + localPath?: string; + events: string[]; +} + +/** + * A definition of a function as appears in the Manifest. 
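A rough sketch of how a caller might drive loadStack() above to print the manifest for a functions directory; the wrapper function is hypothetical:

import { loadStack } from "./loader"; // assumes a sibling module in src/runtime

async function printManifest(functionsDir: string): Promise<void> {
  // loadModule() require()s or dynamically import()s the user code, then extractStack()
  // walks its exports to collect endpoints, required APIs, and extension declarations.
  const stack = await loadStack(functionsDir);
  console.log(JSON.stringify(stack, null, 2));
}

void printManifest(process.cwd());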
+ * + * @alpha + */ +export interface ManifestEndpoint { + entryPoint?: string; + region?: string[]; + omit?: boolean | Expression; + platform?: string; + availableMemoryMb?: number | Expression | ResetValue; + maxInstances?: number | Expression | ResetValue; + minInstances?: number | Expression | ResetValue; + concurrency?: number | Expression | ResetValue; + timeoutSeconds?: number | Expression | ResetValue; + vpc?: + | { + connector: string | Expression; + egressSettings?: string | Expression | ResetValue; + } + | ResetValue; + serviceAccountEmail?: string | Expression | ResetValue; + cpu?: number | "gcf_gen1"; + labels?: Record; + ingressSettings?: string | Expression | ResetValue; + environmentVariables?: Record; + secretEnvironmentVariables?: Array<{ key: string; secret?: string }>; + + httpsTrigger?: { + invoker?: string[]; + }; + + callableTrigger?: { + genkitAction?: string; + }; + + eventTrigger?: { + eventFilters: Record>; + eventFilterPathPatterns?: Record>; + channel?: string; + eventType: string; + retry: boolean | Expression | ResetValue; + region?: string; + serviceAccountEmail?: string | ResetValue; + }; + + taskQueueTrigger?: { + retryConfig?: { + maxAttempts?: number | Expression | ResetValue; + maxRetrySeconds?: number | Expression | ResetValue; + maxBackoffSeconds?: number | Expression | ResetValue; + maxDoublings?: number | Expression | ResetValue; + minBackoffSeconds?: number | Expression | ResetValue; + }; + rateLimits?: { + maxConcurrentDispatches?: number | Expression | ResetValue; + maxDispatchesPerSecond?: number | Expression | ResetValue; + }; + }; + scheduleTrigger?: { + schedule: string | Expression; + timeZone?: string | Expression | ResetValue; + retryConfig?: { + retryCount?: number | Expression | ResetValue; + maxRetrySeconds?: string | Expression | ResetValue; + minBackoffSeconds?: string | Expression | ResetValue; + maxBackoffSeconds?: string | Expression | ResetValue; + maxDoublings?: number | Expression | ResetValue; + // Note: v1 schedule functions use *Duration instead of *Seconds + maxRetryDuration?: string | Expression | ResetValue; + minBackoffDuration?: string | Expression | ResetValue; + maxBackoffDuration?: string | Expression | ResetValue; + }; + }; + + blockingTrigger?: { + eventType: string; + options?: Record; + }; +} + +/** + * Description of API required for this stack. + * @alpha + */ +export interface ManifestRequiredAPI { + api: string; + reason: string; +} + +/** + * A definition of a function/extension deployment as appears in the Manifest. + * + * @alpha + */ +export interface ManifestStack { + specVersion: "v1alpha1"; + params?: WireParamSpec[]; + requiredAPIs: ManifestRequiredAPI[]; + endpoints: Record; + extensions?: Record; +} + +/** + * Returns the JSON representation of a ManifestStack, which has CEL + * expressions in its options as object types, with its expressions + * transformed into the actual CEL strings. 
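A hedged example of the transformation stackToWire() performs on parameter expressions; the param name and endpoint are hypothetical, and the import paths assume a module alongside src/runtime:

import { defineInt } from "../params";
import { ManifestStack, stackToWire } from "./manifest";

const maxInstances = defineInt("MAX_INSTANCES");

const stack: ManifestStack = {
  specVersion: "v1alpha1",
  requiredAPIs: [],
  endpoints: {
    api: { platform: "gcfv2", maxInstances },
  },
};

// Expression values such as maxInstances are replaced by their CEL strings
// (something like "{{ params.MAX_INSTANCES }}") in the wire representation.
console.log(stackToWire(stack));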
+ * + * @alpha + */ +export function stackToWire(stack: ManifestStack): Record { + const wireStack = stack as any; + const traverse = function traverse(obj: Record) { + for (const [key, val] of Object.entries(obj)) { + if (val instanceof Expression) { + obj[key] = val.toCEL(); + } else if (val instanceof ResetValue) { + obj[key] = val.toJSON(); + } else if (typeof val === "object" && val !== null) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + traverse(val as any); + } + } + }; + traverse(wireStack.endpoints); + return wireStack; +} + +const RESETTABLE_OPTIONS: ResettableKeys = { + availableMemoryMb: null, + timeoutSeconds: null, + minInstances: null, + maxInstances: null, + ingressSettings: null, + concurrency: null, + serviceAccountEmail: null, + vpc: null, +}; + +interface ManifestOptions { + preserveExternalChanges?: boolean; +} + +function initEndpoint( + resetOptions: Record, + ...opts: ManifestOptions[] +): ManifestEndpoint { + const endpoint: ManifestEndpoint = {}; + if (opts.every((opt) => !opt?.preserveExternalChanges)) { + for (const key of Object.keys(resetOptions)) { + endpoint[key] = RESET_VALUE; + } + } + return endpoint; +} + +/** + * @internal + */ +export function initV1Endpoint(...opts: ManifestOptions[]): ManifestEndpoint { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const { concurrency, ...resetOpts } = RESETTABLE_OPTIONS; + return initEndpoint({ ...resetOpts }, ...opts); +} + +/** + * @internal + */ +export function initV2Endpoint(...opts: ManifestOptions[]): ManifestEndpoint { + return initEndpoint(RESETTABLE_OPTIONS, ...opts); +} + +const RESETTABLE_RETRY_CONFIG_OPTIONS: ResettableKeys< + ManifestEndpoint["taskQueueTrigger"]["retryConfig"] +> = { + maxAttempts: null, + maxDoublings: null, + maxBackoffSeconds: null, + maxRetrySeconds: null, + minBackoffSeconds: null, +}; + +const RESETTABLE_RATE_LIMITS_OPTIONS: ResettableKeys< + ManifestEndpoint["taskQueueTrigger"]["rateLimits"] +> = { + maxConcurrentDispatches: null, + maxDispatchesPerSecond: null, +}; + +/** + * @internal + */ +export function initTaskQueueTrigger( + ...opts: ManifestOptions[] +): ManifestEndpoint["taskQueueTrigger"] { + const taskQueueTrigger: ManifestEndpoint["taskQueueTrigger"] = { + retryConfig: {}, + rateLimits: {}, + }; + if (opts.every((opt) => !opt?.preserveExternalChanges)) { + for (const key of Object.keys(RESETTABLE_RETRY_CONFIG_OPTIONS)) { + taskQueueTrigger.retryConfig[key] = RESET_VALUE; + } + for (const key of Object.keys(RESETTABLE_RATE_LIMITS_OPTIONS)) { + taskQueueTrigger.rateLimits[key] = RESET_VALUE; + } + } + return taskQueueTrigger; +} + +const RESETTABLE_V1_SCHEDULE_OPTIONS: Omit< + ResettableKeys, + "maxBackoffSeconds" | "minBackoffSeconds" | "maxRetrySeconds" +> = { + retryCount: null, + maxDoublings: null, + maxRetryDuration: null, + maxBackoffDuration: null, + minBackoffDuration: null, +}; + +const RESETTABLE_V2_SCHEDULE_OPTIONS: Omit< + ResettableKeys, + "maxRetryDuration" | "maxBackoffDuration" | "minBackoffDuration" +> = { + retryCount: null, + maxDoublings: null, + maxRetrySeconds: null, + minBackoffSeconds: null, + maxBackoffSeconds: null, +}; + +function initScheduleTrigger( + resetOptions: Record, + schedule: string | Expression, + ...opts: ManifestOptions[] +): ManifestEndpoint["scheduleTrigger"] { + let scheduleTrigger: ManifestEndpoint["scheduleTrigger"] = { + schedule, + retryConfig: {}, + }; + if (opts.every((opt) => !opt?.preserveExternalChanges)) { + for (const key of Object.keys(resetOptions)) { + 
scheduleTrigger.retryConfig[key] = RESET_VALUE; + } + scheduleTrigger = { ...scheduleTrigger, timeZone: RESET_VALUE }; + } + return scheduleTrigger; +} + +/** + * @internal + */ +export function initV1ScheduleTrigger( + schedule: string | Expression, + ...opts: ManifestOptions[] +): ManifestEndpoint["scheduleTrigger"] { + return initScheduleTrigger(RESETTABLE_V1_SCHEDULE_OPTIONS, schedule, ...opts); +} + +/** + * @internal + */ +export function initV2ScheduleTrigger( + schedule: string | Expression, + ...opts: ManifestOptions[] +): ManifestEndpoint["scheduleTrigger"] { + return initScheduleTrigger(RESETTABLE_V2_SCHEDULE_OPTIONS, schedule, ...opts); +} diff --git a/src/types/global.d.ts b/src/types/global.d.ts new file mode 100644 index 000000000..55ae292d9 --- /dev/null +++ b/src/types/global.d.ts @@ -0,0 +1,7 @@ +/* eslint-disable @typescript-eslint/no-empty-interface */ +export {}; + +declare global { + interface AbortSignal {} + interface AbortController {} +} diff --git a/src/utils.ts b/src/utils.ts deleted file mode 100644 index 522dadb9f..000000000 --- a/src/utils.ts +++ /dev/null @@ -1,92 +0,0 @@ -// The MIT License (MIT) -// -// Copyright (c) 2017 Firebase -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -import * as _ from 'lodash'; - -export function normalizePath(path: string): string { - if (!path) { - return ''; - } - return path.replace(/^\//,'').replace(/\/$/, ''); -} - -export function pathParts(path: string): string[] { - if (!path || path === '' || path === '/') { - return []; - } - return normalizePath(path).split('/'); -} - -export function joinPath(base: string, child: string) { - return pathParts(base).concat(pathParts(child)).join('/'); -} - -export function applyChange(src: any, dest: any) { - // if not mergeable, don't merge - if (!_.isPlainObject(dest) || !_.isPlainObject(src)) { - return dest; - } - - return pruneNulls(_.merge({}, src, dest)); -} - -export function pruneNulls(obj: any) { - for (let key in obj) { - if (obj[key] === null) { - delete obj[key]; - } else if (_.isPlainObject(obj[key])) { - pruneNulls(obj[key]); - } - } - return obj; -} - -export function valAt(source: any, path?: string) { - if (source === null) { - return null; - } else if (typeof source !== 'object') { - return path ? 
null : source; - } - - let parts = pathParts(path); - if (!parts.length) { - return source; - } - - let cur = source; - let leaf; - while (parts.length) { - let key = parts.shift(); - if (cur[key] === null || leaf) { - return null; - } else if (typeof cur[key] === 'object') { - if (parts.length) { - cur = cur[key]; - } else { - return cur[key]; - } - } else { - leaf = cur[key]; - } - } - return leaf; -} diff --git a/src/v1/cloud-functions.ts b/src/v1/cloud-functions.ts new file mode 100644 index 000000000..7fd24252b --- /dev/null +++ b/src/v1/cloud-functions.ts @@ -0,0 +1,666 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import type { Request, Response } from "express"; +import { warn } from "../logger"; +import { + DEFAULT_FAILURE_POLICY, + DeploymentOptions, + RESET_VALUE, + FailurePolicy, + Schedule, +} from "./function-configuration"; +export type { Request, Response }; +import { + convertIfPresent, + copyIfPresent, + serviceAccountFromShorthand, + durationFromSeconds, +} from "../common/encoding"; +import { + initV1Endpoint, + initV1ScheduleTrigger, + ManifestEndpoint, + ManifestRequiredAPI, +} from "../runtime/manifest"; +import { ResetValue } from "../common/options"; +import { SecretParam } from "../params/types"; +import { withInit } from "../common/onInit"; + +export { Change } from "../common/change"; + +/** @internal */ +const WILDCARD_REGEX = new RegExp("{[^/{}]*}", "g"); + +/** + * Wire format for an event. + */ +export interface Event { + /** + * Wire format for an event context. + */ + context: { + eventId: string; + timestamp: string; + eventType: string; + resource: Resource; + domain?: string; + auth?: { + variable?: { + uid?: string; + token?: string; + }; + admin: boolean; + }; + }; + /** + * Event data over wire. + */ + data: any; +} + +/** + * The context in which an event occurred. + * + * @remarks + * An EventContext describes: + * - The time an event occurred. + * - A unique identifier of the event. + * - The resource on which the event occurred, if applicable. + * - Authorization of the request that triggered the event, if applicable and + * available. + */ +export interface EventContext> { + /** + * Authentication information for the user that triggered the function. + * + * @remarks + * This object contains `uid` and `token` properties for authenticated users. 
+ * For more detail including token keys, see the + * {@link https://firebase.google.com/docs/reference/rules/rules#properties | security rules reference}. + * + * This field is only populated for Realtime Database triggers and Callable + * functions. For an unauthenticated user, this field is null. For Firebase + * admin users and event types that do not provide user information, this field + * does not exist. + */ + auth?: { + uid: string; + token: EventContextAuthToken; + /** If available, the unparsed ID token. */ + rawToken?: string; + }; + + /** + * The level of permissions for a user. + * + * @remarks + * Valid values are: + * + * - `ADMIN`: Developer user or user authenticated via a service account. + * + * - `USER`: Known user. + * + * - `UNAUTHENTICATED`: Unauthenticated action + * + * - `null`: For event types that do not provide user information (all except + * Realtime Database). + */ + authType?: "ADMIN" | "USER" | "UNAUTHENTICATED"; + + /** + * The event’s unique identifier. + */ + eventId: string; + + /** + * Type of event. + * + * @remarks + * Possible values are: + * + * - `google.analytics.event.log` + * + * - `google.firebase.auth.user.create` + * + * - `google.firebase.auth.user.delete` + * + * - `google.firebase.database.ref.write` + * + * - `google.firebase.database.ref.create` + * + * - `google.firebase.database.ref.update` + * + * - `google.firebase.database.ref.delete` + * + * - `google.firestore.document.write` + * + * - `google.firestore.document.create` + * + * - `google.firestore.document.update` + * + * - `google.firestore.document.delete` + * + * - `google.pubsub.topic.publish` + * + * - `google.firebase.remoteconfig.update` + * + * - `google.storage.object.finalize` + * + * - `google.storage.object.archive` + * + * - `google.storage.object.delete` + * + * - `google.storage.object.metadataUpdate` + * + * - `google.testing.testMatrix.complete` + */ + eventType: string; + + /** + * An object containing the values of the wildcards in the `path` parameter + * provided to the {@link fireabase-functions.v1.database#ref | `ref()`} method for a Realtime Database trigger. + */ + params: Params; + + /** + * The resource that emitted the event. + * + * @remarks + * Valid values are: + * + * Analytics: `projects//events/` + * + * Realtime Database: `projects/_/instances//refs/` + * + * Storage: `projects/_/buckets//objects/#` + * + * Authentication: `projects/` + * + * Pub/Sub: `projects//topics/` + * + * Because Realtime Database instances and Cloud Storage buckets are globally + * unique and not tied to the project, their resources start with `projects/_`. + * Underscore is not a valid project name. + */ + resource: Resource; + /** + * Timestamp for the event as an {@link https://www.ietf.org/rfc/rfc3339.txt | RFC 3339} string. + */ + timestamp: string; +} + +/** + * https://firebase.google.com/docs/reference/security/database#authtoken + */ +export interface EventContextAuthToken { + iss: string; + aud: string; + auth_time: number; + iat: number; + exp: number; + sub: string; + email?: string; + email_verified?: boolean; + phone_number?: string; + name?: string; + firebase?: { + identities?: { + [key: string]: string[]; + }; + sign_in_provider?: string; + tenant?: string; + }; +} + +/** + * Resource is a standard format for defining a resource + * (google.rpc.context.AttributeContext.Resource). In Cloud Functions, it is the + * resource that triggered the function - such as a storage bucket. 
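// Sketch of a fully populated EventContext for a Realtime Database trigger,
// matching the fields documented above (token claims trimmed to the required
// ones; all values are illustrative).
import type { EventContext } from "firebase-functions/v1";

export const exampleContext: EventContext<{ messageId: string }> = {
  eventId: "evt-1",
  eventType: "google.firebase.database.ref.create",
  timestamp: "2023-01-01T00:00:00.000Z",
  resource: {
    service: "firebaseio.com",
    name: "projects/_/instances/my-db/refs/messages/message1",
  },
  params: { messageId: "message1" },
  authType: "USER",
  auth: {
    uid: "user_1",
    token: {
      iss: "https://securetoken.google.com/my-project",
      aud: "my-project",
      auth_time: 1672531200,
      iat: 1672531200,
      exp: 1672534800,
      sub: "user_1",
    },
  },
};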
+ */ +export interface Resource { + /** The name of the service that this resource belongs to. */ + service: string; + /** + * The stable identifier (name) of a resource on the service. + * A resource can be logically identified as "//{resource.service}/{resource.name}" + */ + name: string; + /** + * The type of the resource. The syntax is platform-specific because different platforms define their resources differently. + * For Google APIs, the type format must be "{service}/{kind}" + */ + type?: string; + /** Map of Resource's labels. */ + labels?: { [tag: string]: string }; +} + +/** + * TriggerAnnotion is used internally by the firebase CLI to understand what + * type of Cloud Function to deploy. + */ +interface TriggerAnnotation { + availableMemoryMb?: number; + blockingTrigger?: { + eventType: string; + options?: Record; + }; + eventTrigger?: { + eventType: string; + resource: string; + service: string; + }; + failurePolicy?: FailurePolicy; + httpsTrigger?: { + invoker?: string[]; + }; + labels?: { [key: string]: string }; + regions?: string[]; + schedule?: Schedule; + timeout?: string; + vpcConnector?: string; + vpcConnectorEgressSettings?: string; + serviceAccountEmail?: string; + ingressSettings?: string; + secrets?: string[]; +} + +/** + * A Runnable has a `run` method which directly invokes the user-defined + * function - useful for unit testing. + */ +export interface Runnable { + /** Directly invoke the user defined function. */ + run: (data: T, context: any) => PromiseLike | any; +} + +/** + * The function type for HTTPS triggers. This should be exported from your + * JavaScript file to define a Cloud Function. + * + * @remarks + * This type is a special JavaScript function which takes Express + * {@link https://expressjs.com/en/api.html#req | `Request` } and + * {@link https://expressjs.com/en/api.html#res | `Response` } objects as its only + * arguments. + */ +export interface HttpsFunction { + (req: Request, resp: Response): void | Promise; + + /** @alpha */ + __trigger: TriggerAnnotation; + + /** @alpha */ + __endpoint: ManifestEndpoint; + + /** @alpha */ + __requiredAPIs?: ManifestRequiredAPI[]; +} + +/** + * The function type for Auth Blocking triggers. + * + * @remarks + * This type is a special JavaScript function for Auth Blocking triggers which takes Express + * {@link https://expressjs.com/en/api.html#req | `Request` } and + * {@link https://expressjs.com/en/api.html#res | `Response` } objects as its only + * arguments. + */ +export interface BlockingFunction { + /** @public */ + (req: Request, resp: Response): void | Promise; + + /** @alpha */ + __trigger: TriggerAnnotation; + + /** @alpha */ + __endpoint: ManifestEndpoint; + + /** @alpha */ + __requiredAPIs?: ManifestRequiredAPI[]; +} + +/** + * The function type for all non-HTTPS triggers. This should be exported + * from your JavaScript file to define a Cloud Function. + * + * This type is a special JavaScript function which takes a templated + * `Event` object as its only argument. 
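// Because every CloudFunction is also a Runnable, its handler can be unit-tested
// by calling run() directly. A minimal sketch (trigger path and test data are
// hypothetical):
import * as functions from "firebase-functions/v1";

export const logNewMessage = functions.database
  .ref("/messages/{messageId}")
  .onCreate((snapshot, context) => {
    functions.logger.info("new message", { id: context.params.messageId });
    return null;
  });

// In a test, skip the trigger plumbing entirely:
//   await logNewMessage.run(fakeSnapshot, { params: { messageId: "m1" } });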
+ */ +export interface CloudFunction extends Runnable { + (input: any, context?: any): PromiseLike | any; + + /** @alpha */ + __trigger: TriggerAnnotation; + + /** @alpha */ + __endpoint: ManifestEndpoint; + + /** @alpha */ + __requiredAPIs?: ManifestRequiredAPI[]; +} + +/** @internal */ +export interface MakeCloudFunctionArgs { + after?: (raw: Event) => void; + before?: (raw: Event) => void; + contextOnlyHandler?: (context: EventContext) => PromiseLike | any; + dataConstructor?: (raw: Event) => EventData; + eventType: string; + handler?: (data: EventData, context: EventContext) => PromiseLike | any; + labels?: Record; + legacyEventType?: string; + options?: DeploymentOptions; + /* + * TODO: should remove `provider` and require a fully qualified `eventType` + * once all providers have migrated to new format. + */ + provider: string; + service: string; + triggerResource: () => string; +} + +/** @internal */ +export function makeCloudFunction({ + contextOnlyHandler, + dataConstructor = (raw: Event) => raw.data, + eventType, + handler, + labels = {}, + legacyEventType, + options = {}, + provider, + service, + triggerResource, +}: MakeCloudFunctionArgs): CloudFunction { + handler = withInit(handler ?? contextOnlyHandler); + const cloudFunction: any = (data: any, context: any) => { + if (legacyEventType && context.eventType === legacyEventType) { + /* + * v1beta1 event flow has different format for context, transform them to + * new format. + */ + context.eventType = provider + "." + eventType; + context.resource = { + service, + name: context.resource, + }; + } + + const event: Event = { + data, + context, + }; + + if (provider === "google.firebase.database") { + context.authType = _detectAuthType(event); + if (context.authType !== "ADMIN") { + context.auth = _makeAuth(event, context.authType); + } else { + delete context.auth; + } + } + + if (triggerResource() == null) { + Object.defineProperty(context, "params", { + get: () => { + throw new Error("context.params is not available when using the handler namespace."); + }, + }); + } else { + context.params = context.params || _makeParams(context, triggerResource); + } + + let promise; + if (labels && labels["deployment-scheduled"]) { + // Scheduled function do not have meaningful data, so exclude it + promise = contextOnlyHandler(context); + } else { + const dataOrChange = dataConstructor(event); + promise = handler(dataOrChange, context); + } + if (typeof promise === "undefined") { + warn("Function returned undefined, expected Promise or value"); + } + return Promise.resolve(promise); + }; + + Object.defineProperty(cloudFunction, "__trigger", { + get: () => { + if (triggerResource() == null) { + return {}; + } + + const trigger: any = { + ...optionsToTrigger(options), + eventTrigger: { + resource: triggerResource(), + eventType: legacyEventType || provider + "." 
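// Sketch of how an internal provider module might wire a trigger through
// makeCloudFunction above; the provider id, service, and resource are
// hypothetical, and the file is assumed to sit next to the other providers
// (the analytics provider later in this diff follows the same pattern).
import { CloudFunction, EventContext, makeCloudFunction } from "../cloud-functions";

export function onWidgetChange(
  handler: (data: unknown, context: EventContext) => PromiseLike<unknown> | unknown
): CloudFunction<unknown> {
  return makeCloudFunction({
    provider: "google.example",
    service: "example.googleapis.com",
    eventType: "widget.change",
    legacyEventType: "providers/google.example/eventTypes/widget.change",
    triggerResource: () => `projects/${process.env.GCLOUD_PROJECT}/widgets/demo`,
    handler,
    options: {},
  });
}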
+ eventType, + service, + }, + }; + if (!!labels && Object.keys(labels).length) { + trigger.labels = { ...trigger.labels, ...labels }; + } + return trigger; + }, + }); + + Object.defineProperty(cloudFunction, "__endpoint", { + get: () => { + if (triggerResource() == null) { + return undefined; + } + + const endpoint: ManifestEndpoint = { + platform: "gcfv1", + ...initV1Endpoint(options), + ...optionsToEndpoint(options), + }; + + if (options.schedule) { + endpoint.scheduleTrigger = initV1ScheduleTrigger(options.schedule.schedule, options); + copyIfPresent(endpoint.scheduleTrigger, options.schedule, "timeZone"); + copyIfPresent( + endpoint.scheduleTrigger.retryConfig, + options.schedule.retryConfig, + "retryCount", + "maxDoublings", + "maxBackoffDuration", + "maxRetryDuration", + "minBackoffDuration" + ); + } else { + endpoint.eventTrigger = { + eventType: legacyEventType || provider + "." + eventType, + eventFilters: { + resource: triggerResource(), + }, + retry: !!options.failurePolicy, + }; + } + + // Note: We intentionally don't make use of labels args here. + // labels is used to pass SDK-defined labels to the trigger, which isn't + // something we will do in the container contract world. + endpoint.labels = { ...endpoint.labels }; + + return endpoint; + }, + }); + + if (options.schedule) { + cloudFunction.__requiredAPIs = [ + { + api: "cloudscheduler.googleapis.com", + reason: "Needed for scheduled functions.", + }, + ]; + } + + cloudFunction.run = handler || contextOnlyHandler; + return cloudFunction; +} + +function _makeParams( + context: EventContext, + triggerResourceGetter: () => string +): Record { + if (context.params) { + // In unit testing, user may directly provide `context.params`. + return context.params; + } + if (!context.resource) { + // In unit testing, `resource` may be unpopulated for a test event. 
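// For comparison, the two manifest shapes the __endpoint getter above can
// produce (values illustrative): scheduled functions get a scheduleTrigger,
// other background functions get an eventTrigger keyed by resource.
export const scheduledEndpointSketch = {
  platform: "gcfv1",
  scheduleTrigger: {
    schedule: "every 5 minutes",
    timeZone: "America/Los_Angeles",
    retryConfig: { retryCount: 3 },
  },
  labels: {},
};

export const databaseEndpointSketch = {
  platform: "gcfv1",
  eventTrigger: {
    eventType: "google.firebase.database.ref.write",
    eventFilters: { resource: "projects/_/instances/my-db/refs/messages/{id}" },
    retry: false,
  },
  labels: {},
};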
+ return {}; + } + const triggerResource = triggerResourceGetter(); + const wildcards = triggerResource.match(WILDCARD_REGEX); + const params: { [option: string]: any } = {}; + + // Note: some tests don't set context.resource.name + const eventResourceParts = context?.resource?.name?.split?.("/"); + if (wildcards && eventResourceParts) { + const triggerResourceParts = triggerResource.split("/"); + for (const wildcard of wildcards) { + const wildcardNoBraces = wildcard.slice(1, -1); + const position = triggerResourceParts.indexOf(wildcard); + params[wildcardNoBraces] = eventResourceParts[position]; + } + } + return params; +} + +function _makeAuth(event: Event, authType: string) { + if (authType === "UNAUTHENTICATED") { + return null; + } + return { + uid: event.context?.auth?.variable?.uid, + token: event.context?.auth?.variable?.token, + }; +} + +function _detectAuthType(event: Event) { + if (event.context?.auth?.admin) { + return "ADMIN"; + } + if (event.context?.auth?.variable) { + return "USER"; + } + return "UNAUTHENTICATED"; +} + +/** @hidden */ +export function optionsToTrigger(options: DeploymentOptions) { + const trigger: any = {}; + copyIfPresent( + trigger, + options, + "regions", + "schedule", + "minInstances", + "maxInstances", + "ingressSettings", + "vpcConnectorEgressSettings", + "vpcConnector", + "labels", + "secrets" + ); + convertIfPresent(trigger, options, "failurePolicy", "failurePolicy", (policy) => { + if (policy === false) { + return undefined; + } else if (policy === true) { + return DEFAULT_FAILURE_POLICY; + } else { + return policy; + } + }); + convertIfPresent(trigger, options, "timeout", "timeoutSeconds", durationFromSeconds); + convertIfPresent(trigger, options, "availableMemoryMb", "memory", (mem) => { + const memoryLookup = { + "128MB": 128, + "256MB": 256, + "512MB": 512, + "1GB": 1024, + "2GB": 2048, + "4GB": 4096, + "8GB": 8192, + }; + return memoryLookup[mem]; + }); + convertIfPresent( + trigger, + options, + "serviceAccountEmail", + "serviceAccount", + serviceAccountFromShorthand + ); + + return trigger; +} + +export function optionsToEndpoint(options: DeploymentOptions): ManifestEndpoint { + const endpoint: ManifestEndpoint = {}; + copyIfPresent( + endpoint, + options, + "omit", + "minInstances", + "maxInstances", + "ingressSettings", + "labels", + "timeoutSeconds" + ); + convertIfPresent(endpoint, options, "region", "regions"); + convertIfPresent(endpoint, options, "serviceAccountEmail", "serviceAccount", (sa) => sa); + convertIfPresent( + endpoint, + options, + "secretEnvironmentVariables", + "secrets", + (secrets: (string | SecretParam)[]) => + secrets.map((secret) => ({ key: secret instanceof SecretParam ? secret.name : secret })) + ); + if (options?.vpcConnector !== undefined) { + if (options.vpcConnector === null || options.vpcConnector instanceof ResetValue) { + endpoint.vpc = RESET_VALUE; + } else { + const vpc: ManifestEndpoint["vpc"] = { connector: options.vpcConnector }; + convertIfPresent(vpc, options, "egressSettings", "vpcConnectorEgressSettings"); + endpoint.vpc = vpc; + } + } + convertIfPresent(endpoint, options, "availableMemoryMb", "memory", (mem) => { + const memoryLookup = { + "128MB": 128, + "256MB": 256, + "512MB": 512, + "1GB": 1024, + "2GB": 2048, + "4GB": 4096, + "8GB": 8192, + }; + return typeof mem === "object" ? 
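// A worked example of the wildcard matching implemented in _makeParams above:
// split both resource names on "/" and read the event segments found at the
// wildcard positions of the trigger resource.
const triggerResource = "projects/_/instances/my-db/refs/users/{uid}/messages/{messageId}";
const eventResource = "projects/_/instances/my-db/refs/users/alice/messages/msg42";

const wildcards = triggerResource.match(/{[^/{}]*}/g) ?? []; // ["{uid}", "{messageId}"]
const triggerParts = triggerResource.split("/");
const eventParts = eventResource.split("/");
export const extractedParams: Record<string, string> = {};
for (const wildcard of wildcards) {
  extractedParams[wildcard.slice(1, -1)] = eventParts[triggerParts.indexOf(wildcard)];
}
// extractedParams -> { uid: "alice", messageId: "msg42" }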
mem : memoryLookup[mem]; + }); + return endpoint; +} diff --git a/src/v1/config.ts b/src/v1/config.ts new file mode 100644 index 000000000..b50ce7b31 --- /dev/null +++ b/src/v1/config.ts @@ -0,0 +1,36 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +export { firebaseConfig } from "../common/config"; + +/** + * @deprecated `functions.config()` has been removed in firebase-functions v7. + * Migrate to environment parameters using the `params` module immediately. + * Migration guide: https://firebase.google.com/docs/functions/config-env#migrate-config + */ +export const config: never = (() => { + throw new Error( + "functions.config() has been removed in firebase-functions v7. " + + "Migrate to environment parameters using the params module. " + + "Migration guide: https://firebase.google.com/docs/functions/config-env#migrate-config" + ); +}) as never; diff --git a/src/v1/function-builder.ts b/src/v1/function-builder.ts new file mode 100644 index 000000000..e70f26166 --- /dev/null +++ b/src/v1/function-builder.ts @@ -0,0 +1,501 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
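// Sketch of the migration the error above points to: declare parameters with the
// params module instead of reading functions.config(). Parameter names are made
// up; defineString/defineSecret are assumed to be exposed at the package's
// params entry point.
import { defineSecret, defineString } from "firebase-functions/params";

export const SERVICE_URL = defineString("SERVICE_URL");
export const API_KEY = defineSecret("API_KEY");

// Inside a handler, read values at runtime:
//   const url = SERVICE_URL.value();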
+ +import * as express from "express"; + +import { ResetValue } from "../common/options"; +import { Expression, SecretParam } from "../params/types"; +import { EventContext } from "./cloud-functions"; +import { + DeploymentOptions, + INGRESS_SETTINGS_OPTIONS, + MAX_NUMBER_USER_LABELS, + MAX_TIMEOUT_SECONDS, + RuntimeOptions, + SUPPORTED_REGIONS, + VALID_MEMORY_OPTIONS, + VPC_EGRESS_SETTINGS_OPTIONS, +} from "./function-configuration"; +import * as analytics from "./providers/analytics"; +import * as auth from "./providers/auth"; +import * as database from "./providers/database"; +import * as firestore from "./providers/firestore"; +import * as https from "./providers/https"; +import * as pubsub from "./providers/pubsub"; +import * as remoteConfig from "./providers/remoteConfig"; +import * as storage from "./providers/storage"; +import * as tasks from "./providers/tasks"; +import * as testLab from "./providers/testLab"; + +/** + * Assert that the runtime options passed in are valid. + * @param runtimeOptions object containing memory and timeout information. + * @throws { Error } Memory and TimeoutSeconds values must be valid. + */ +function assertRuntimeOptionsValid(runtimeOptions: RuntimeOptions): boolean { + const mem = runtimeOptions.memory; + if (mem && typeof mem !== "object" && !VALID_MEMORY_OPTIONS.includes(mem)) { + throw new Error( + `The only valid memory allocation values are: ${VALID_MEMORY_OPTIONS.join(", ")}` + ); + } + if ( + typeof runtimeOptions.timeoutSeconds === "number" && + (runtimeOptions.timeoutSeconds > MAX_TIMEOUT_SECONDS || runtimeOptions.timeoutSeconds < 0) + ) { + throw new Error(`TimeoutSeconds must be between 0 and ${MAX_TIMEOUT_SECONDS}`); + } + + if ( + runtimeOptions.ingressSettings && + !(runtimeOptions.ingressSettings instanceof ResetValue) && + !INGRESS_SETTINGS_OPTIONS.includes(runtimeOptions.ingressSettings) + ) { + throw new Error( + `The only valid ingressSettings values are: ${INGRESS_SETTINGS_OPTIONS.join(",")}` + ); + } + + if ( + runtimeOptions.vpcConnectorEgressSettings && + !(runtimeOptions.vpcConnectorEgressSettings instanceof ResetValue) && + !VPC_EGRESS_SETTINGS_OPTIONS.includes(runtimeOptions.vpcConnectorEgressSettings) + ) { + throw new Error( + `The only valid vpcConnectorEgressSettings values are: ${VPC_EGRESS_SETTINGS_OPTIONS.join( + "," + )}` + ); + } + + validateFailurePolicy(runtimeOptions.failurePolicy); + const serviceAccount = runtimeOptions.serviceAccount; + if ( + serviceAccount && + !( + serviceAccount === "default" || + serviceAccount instanceof ResetValue || + serviceAccount instanceof Expression || + serviceAccount.includes("@") + ) + ) { + throw new Error( + `serviceAccount must be set to 'default', a string expression, a service account email, or '{serviceAccountName}@'` + ); + } + + if (runtimeOptions.labels) { + // Labels must follow the rules listed in + // https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements + + if (Object.keys(runtimeOptions.labels).length > MAX_NUMBER_USER_LABELS) { + throw new Error( + `A function must not have more than ${MAX_NUMBER_USER_LABELS} user-defined labels.` + ); + } + + // We reserve the 'deployment' and 'firebase' namespaces for future feature development. + const reservedKeys = Object.keys(runtimeOptions.labels).filter( + (key) => key.startsWith("deployment") || key.startsWith("firebase") + ); + if (reservedKeys.length) { + throw new Error( + `Invalid labels: ${reservedKeys.join( + ", " + )}. 
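// Runtime options that pass the validation above, shown end to end (connector
// and service account names are placeholders):
import * as functions from "firebase-functions/v1";

export const heavyWorker = functions
  .runWith({
    memory: "1GB",
    timeoutSeconds: 300,
    ingressSettings: "ALLOW_INTERNAL_ONLY",
    vpcConnector: "my-connector",
    vpcConnectorEgressSettings: "PRIVATE_RANGES_ONLY",
    serviceAccount: "worker@my-project.iam.gserviceaccount.com",
  })
  .https.onRequest((req, res) => {
    res.status(200).send("ok");
  });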
Labels may not start with reserved names 'deployment' or 'firebase'` + ); + } + + const invalidLengthKeys = Object.keys(runtimeOptions.labels).filter( + (key) => key.length < 1 || key.length > 63 + ); + if (invalidLengthKeys.length > 0) { + throw new Error( + `Invalid labels: ${invalidLengthKeys.join( + ", " + )}. Label keys must be between 1 and 63 characters in length.` + ); + } + + const invalidLengthValues = Object.values(runtimeOptions.labels).filter( + (value) => value.length > 63 + ); + if (invalidLengthValues.length > 0) { + throw new Error( + `Invalid labels: ${invalidLengthValues.join( + ", " + )}. Label values must be less than 64 charcters.` + ); + } + + // Keys can contain lowercase letters, foreign characters, numbers, _ or -. They must start with a letter. + const validKeyPattern = /^[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}$/u; + const invalidKeys = Object.keys(runtimeOptions.labels).filter( + (key) => !validKeyPattern.test(key) + ); + if (invalidKeys.length > 0) { + throw new Error( + `Invalid labels: ${invalidKeys.join( + ", " + )}. Label keys can only contain lowercase letters, international characters, numbers, _ or -, and must start with a letter.` + ); + } + + // Values can contain lowercase letters, foreign characters, numbers, _ or -. + const validValuePattern = /^[\p{Ll}\p{Lo}\p{N}_-]{0,63}$/u; + const invalidValues = Object.values(runtimeOptions.labels).filter( + (value) => !validValuePattern.test(value) + ); + if (invalidValues.length > 0) { + throw new Error( + `Invalid labels: ${invalidValues.join( + ", " + )}. Label values can only contain lowercase letters, international characters, numbers, _ or -.` + ); + } + } + + if (typeof runtimeOptions.invoker === "string" && runtimeOptions.invoker.length === 0) { + throw new Error("Invalid service account for function invoker, must be a non-empty string"); + } + if (runtimeOptions.invoker !== undefined && Array.isArray(runtimeOptions.invoker)) { + if (runtimeOptions.invoker.length === 0) { + throw new Error("Invalid invoker array, must contain at least 1 service account entry"); + } + for (const serviceAccount of runtimeOptions.invoker) { + if (serviceAccount.length === 0) { + throw new Error("Invalid invoker array, a service account must be a non-empty string"); + } + if (serviceAccount === "public") { + throw new Error( + "Invalid invoker array, a service account cannot be set to the 'public' identifier" + ); + } + if (serviceAccount === "private") { + throw new Error( + "Invalid invoker array, a service account cannot be set to the 'private' identifier" + ); + } + } + } + + if (runtimeOptions.secrets !== undefined) { + const invalidSecrets = runtimeOptions.secrets.filter( + (s) => !/^[A-Za-z\d\-_]+$/.test(s instanceof SecretParam ? s.name : s) + ); + if (invalidSecrets.length > 0) { + throw new Error( + `Invalid secrets: ${invalidSecrets.join(",")}. ` + + "Secret must be configured using the resource id (e.g. 
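// Labels, invoker, and secrets that satisfy the checks above: lowercase,
// letter-initial label keys without the reserved "deployment"/"firebase"
// prefixes, a non-empty invoker list, and secrets referenced by resource id
// (all names are placeholders):
import * as functions from "firebase-functions/v1";

export const billingSync = functions
  .runWith({
    labels: { team: "growth", tier: "backend" },
    invoker: ["scheduler@my-project.iam.gserviceaccount.com"],
    secrets: ["STRIPE_API_KEY"],
  })
  .https.onRequest((req, res) => {
    res.sendStatus(204);
  });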
API_KEY)" + ); + } + } + + if ("allowInvalidAppCheckToken" in runtimeOptions) { + throw new Error( + 'runWith option "allowInvalidAppCheckToken" has been inverted and ' + + 'renamed "enforceAppCheck"' + ); + } + + return true; +} + +function validateFailurePolicy(policy: any) { + if (typeof policy === "boolean" || typeof policy === "undefined") { + return; + } + if (typeof policy !== "object") { + throw new Error(`failurePolicy must be a boolean or an object.`); + } + + const retry = policy.retry; + if (typeof retry !== "object" || Object.keys(retry).length) { + throw new Error("failurePolicy.retry must be an empty object."); + } +} + +/** + * Assert regions specified are valid. + * @param regions list of regions. + * @throws { Error } Regions must be in list of supported regions. + */ +function assertRegionsAreValid(regions: (string | Expression | ResetValue)[]): boolean { + if (!regions.length) { + throw new Error("You must specify at least one region"); + } + return true; +} + +/** + * Configure the regions that the function is deployed to. + * @param regions One of more region strings. + * @example + * functions.region('us-east1') + * @example + * functions.region('us-east1', 'us-central1') + */ +export function region( + ...regions: Array<(typeof SUPPORTED_REGIONS)[number] | string | Expression | ResetValue> +): FunctionBuilder { + if (assertRegionsAreValid(regions)) { + return new FunctionBuilder({ regions }); + } +} + +/** + * Configure runtime options for the function. + * @param runtimeOptions Object with optional fields: + * 1. `memory`: amount of memory to allocate to the function, possible values + * are: '128MB', '256MB', '512MB', '1GB', '2GB', '4GB', and '8GB'. + * 2. `timeoutSeconds`: timeout for the function in seconds, possible values are + * 0 to 540. + * 3. `failurePolicy`: failure policy of the function, with boolean `true` being + * equivalent to providing an empty retry object. + * 4. `vpcConnector`: id of a VPC connector in same project and region. + * 5. `vpcConnectorEgressSettings`: when a vpcConnector is set, control which + * egress traffic is sent through the vpcConnector. + * 6. `serviceAccount`: Specific service account for the function. + * 7. `ingressSettings`: ingress settings for the function, which control where a HTTPS + * function can be called from. + * + * Value must not be null. + */ +export function runWith(runtimeOptions: RuntimeOptions): FunctionBuilder { + if (assertRuntimeOptionsValid(runtimeOptions)) { + return new FunctionBuilder(runtimeOptions); + } +} + +export class FunctionBuilder { + constructor(private options: DeploymentOptions) {} + + /** + * Configure the regions that the function is deployed to. + * @param regions One or more region strings. + * @example + * functions.region('us-east1') + * @example + * functions.region('us-east1', 'us-central1') + */ + region( + ...regions: Array<(typeof SUPPORTED_REGIONS)[number] | string | Expression | ResetValue> + ): FunctionBuilder { + if (assertRegionsAreValid(regions)) { + this.options.regions = regions; + return this; + } + } + + /** + * Configure runtime options for the function. + * @param runtimeOptions Object with optional fields: + * 1. `memory`: amount of memory to allocate to the function, possible values + * are: '128MB', '256MB', '512MB', '1GB', '2GB', '4GB', and '8GB'. + * 2. `timeoutSeconds`: timeout for the function in seconds, possible values are + * 0 to 540. + * 3. 
`failurePolicy`: failure policy of the function, with boolean `true` being + * equivalent to providing an empty retry object. + * 4. `vpcConnector`: id of a VPC connector in the same project and region + * 5. `vpcConnectorEgressSettings`: when a `vpcConnector` is set, control which + * egress traffic is sent through the `vpcConnector`. + * + * Value must not be null. + */ + runWith(runtimeOptions: RuntimeOptions): FunctionBuilder { + if (assertRuntimeOptionsValid(runtimeOptions)) { + this.options = { + ...this.options, + ...runtimeOptions, + }; + return this; + } + } + + get https() { + if (this.options.failurePolicy !== undefined) { + console.warn("RuntimeOptions.failurePolicy is not supported in https functions."); + } + + return { + /** + * Handle HTTP requests. + * @param handler A function that takes a request and response object, + * same signature as an Express app. + */ + onRequest: (handler: (req: https.Request, resp: express.Response) => void | Promise) => + https._onRequestWithOptions(handler, this.options), + /** + * Declares a callable method for clients to call using a Firebase SDK. + * @param handler A method that takes a data and context and returns a value. + */ + onCall: (handler: (data: any, context: https.CallableContext) => any | Promise) => + https._onCallWithOptions(handler, this.options), + }; + } + + get tasks() { + return { + /** + * Declares a task queue function for clients to call using a Firebase Admin SDK. + * @param options Configurations for the task queue function. + */ + /** @hidden */ + taskQueue: (options?: tasks.TaskQueueOptions) => { + return new tasks.TaskQueueBuilder(options, this.options); + }, + }; + } + + get database() { + return { + /** + * Selects a database instance that will trigger the function. If omitted, + * will pick the default database for your project. + * @param instance The Realtime Database instance to use. + */ + instance: (instance: string) => database._instanceWithOptions(instance, this.options), + + /** + * Select Firebase Realtime Database Reference to listen to. + * + * This method behaves very similarly to the method of the same name in + * the client and Admin Firebase SDKs. Any change to the Database that + * affects the data at or below the provided `path` will fire an event in + * Cloud Functions. + * + * There are three important differences between listening to a Realtime + * Database event in Cloud Functions and using the Realtime Database in + * the client and Admin SDKs: + * 1. Cloud Functions allows wildcards in the `path` name. Any `path` + * component in curly brackets (`{}`) is a wildcard that matches all + * strings. The value that matched a certain invocation of a Cloud + * Function is returned as part of the `context.params` object. For + * example, `ref("messages/{messageId}")` matches changes at + * `/messages/message1` or `/messages/message2`, resulting in + * `context.params.messageId` being set to `"message1"` or + * `"message2"`, respectively. + * 2. Cloud Functions do not fire an event for data that already existed + * before the Cloud Function was deployed. + * 3. Cloud Function events have access to more information, including + * information about the user who triggered the Cloud Function. + * @param ref Path of the database to listen to. + */ + ref: (path: Ref) => database._refWithOptions(path, this.options), + }; + } + + get firestore() { + return { + /** + * Select the Firestore document to listen to for events. + * @param path Full database path to listen to. 
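// The wildcard behavior described above in a deployable sketch: the {uid}
// segment becomes context.params.uid for every matching write (instance name
// and paths are illustrative).
import * as functions from "firebase-functions/v1";

export const onStatusWrite = functions
  .region("europe-west1")
  .database.instance("my-db")
  .ref("/status/{uid}")
  .onWrite((change, context) => {
    functions.logger.info("status changed", {
      uid: context.params.uid,
      before: change.before.val(),
      after: change.after.val(),
    });
  });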
This includes the name of + * the collection that the document is a part of. For example, if the + * collection is named "users" and the document is named "Ada", then the + * path is "/users/Ada". + */ + document: (path: Path) => + firestore._documentWithOptions(path, this.options), + + /** @hidden */ + namespace: (namespace: string) => firestore._namespaceWithOptions(namespace, this.options), + + /** @hidden */ + database: (database: string) => firestore._databaseWithOptions(database, this.options), + }; + } + + get analytics() { + return { + /** + * Select analytics events to listen to for events. + * @param analyticsEventType Name of the analytics event type. + */ + event: (analyticsEventType: string) => + analytics._eventWithOptions(analyticsEventType, this.options), + }; + } + + get remoteConfig() { + return { + /** + * Handle all updates (including rollbacks) that affect a Remote Config + * project. + * @param handler A function that takes the updated Remote Config template + * version metadata as an argument. + */ + onUpdate: ( + handler: ( + version: remoteConfig.TemplateVersion, + context: EventContext + ) => PromiseLike | any + ) => remoteConfig._onUpdateWithOptions(handler, this.options), + }; + } + + get storage() { + return { + /** + * The optional bucket function allows you to choose which buckets' events + * to handle. This step can be bypassed by calling object() directly, + * which will use the default Cloud Storage for Firebase bucket. + * @param bucket Name of the Google Cloud Storage bucket to listen to. + */ + bucket: (bucket?: string) => storage._bucketWithOptions(this.options, bucket), + + /** + * Handle events related to Cloud Storage objects. + */ + object: () => storage._objectWithOptions(this.options), + }; + } + + get pubsub() { + return { + /** + * Select Cloud Pub/Sub topic to listen to. + * @param topic Name of Pub/Sub topic, must belong to the same project as + * the function. + */ + topic: (topic: string) => pubsub._topicWithOptions(topic, this.options), + schedule: (schedule: string) => pubsub._scheduleWithOptions(schedule, this.options), + }; + } + + get auth() { + return { + /** + * Handle events related to Firebase authentication users. + */ + user: (userOptions?: auth.UserOptions) => auth._userWithOptions(this.options, userOptions), + }; + } + + get testLab() { + return { + /** + * Handle events related to Test Lab test matrices. + */ + testMatrix: () => testLab._testMatrixWithOpts(this.options), + }; + } +} diff --git a/src/v1/function-configuration.ts b/src/v1/function-configuration.ts new file mode 100644 index 000000000..90aa391fc --- /dev/null +++ b/src/v1/function-configuration.ts @@ -0,0 +1,303 @@ +import { Expression } from "../params"; +import { ResetValue } from "../common/options"; +import { SecretParam } from "../params/types"; + +export { RESET_VALUE } from "../common/options"; + +/** + * List of all regions supported by Cloud Functions. + */ +export const SUPPORTED_REGIONS = [ + "us-central1", + "us-east1", + "us-east4", + "us-west2", + "us-west3", + "us-west4", + "europe-central2", + "europe-west1", + "europe-west2", + "europe-west3", + "europe-west6", + "asia-east1", + "asia-east2", + "asia-northeast1", + "asia-northeast2", + "asia-northeast3", + "asia-south1", + "asia-southeast1", + "asia-southeast2", + "northamerica-northeast1", + "southamerica-east1", + "australia-southeast1", +] as const; + +/** + * Cloud Functions min timeout value. + */ +export const MIN_TIMEOUT_SECONDS = 0; + +/** + * Cloud Functions max timeout value. 
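// The firestore and storage builders above in use; collection, field, and
// bucket names are illustrative.
import * as functions from "firebase-functions/v1";

export const onUserCreated = functions.firestore
  .document("users/{userId}")
  .onCreate((snapshot, context) => {
    functions.logger.info("user created", {
      userId: context.params.userId,
      name: snapshot.get("name"),
    });
  });

export const onUpload = functions.storage
  .bucket("my-project-uploads")
  .object()
  .onFinalize((object) => {
    functions.logger.info("object finalized", { name: object.name, size: object.size });
  });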
+ */ +export const MAX_TIMEOUT_SECONDS = 540; + +/** + * List of available memory options supported by Cloud Functions. + */ +export const VALID_MEMORY_OPTIONS = [ + "128MB", + "256MB", + "512MB", + "1GB", + "2GB", + "4GB", + "8GB", +] as const; + +/** + * List of available options for VpcConnectorEgressSettings. + */ +export const VPC_EGRESS_SETTINGS_OPTIONS = [ + "VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED", + "PRIVATE_RANGES_ONLY", + "ALL_TRAFFIC", +] as const; + +/** + * List of available options for IngressSettings. + */ +export const INGRESS_SETTINGS_OPTIONS = [ + "INGRESS_SETTINGS_UNSPECIFIED", + "ALLOW_ALL", + "ALLOW_INTERNAL_ONLY", + "ALLOW_INTERNAL_AND_GCLB", +] as const; + +/** + * Scheduler retry options. Applies only to scheduled functions. + */ +export interface ScheduleRetryConfig { + /** + * The number of attempts that the system will make to run a job using the exponential backoff procedure described by {@link ScheduleRetryConfig.maxDoublings}. + * + * @defaultValue 0 (infinite retry) + */ + retryCount?: number | Expression | ResetValue; + /** + * The time limit for retrying a failed job, measured from time when an execution was first attempted. + * + * If specified with {@link ScheduleRetryConfig.retryCount}, the job will be retried until both limits are reached. + * + * @defaultValue 0 + */ + maxRetryDuration?: string | Expression | ResetValue; + /** + * The minimum amount of time to wait before retrying a job after it fails. + * + * @defaultValue 5 seconds + */ + minBackoffDuration?: string | Expression | ResetValue; + /** + * The maximum amount of time to wait before retrying a job after it fails. + * + * @defaultValue 1 hour + */ + maxBackoffDuration?: string | Expression | ResetValue; + /** + * The max number of backoff doubling applied at each retry. + * + * @defaultValue 5 + */ + maxDoublings?: number | Expression | ResetValue; +} + +/** + * Configuration options for scheduled functions. + */ +export interface Schedule { + /** + * Describes the schedule on which the job will be executed. + * + * The schedule can be either of the following types: + * + * 1. {@link https://en.wikipedia.org/wiki/Cron#Overview | Crontab} + * + * 2. English-like {@link https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules | schedule} + * + * @example + * ``` + * // Crontab schedule + * schedule: "0 9 * * 1"` // Every Monday at 09:00 AM + * + * // English-like schedule + * schedule: "every 5 minutes" + * ``` + */ + schedule: string; + /** + * Specifies the time zone to be used in interpreting {@link Schedule.schedule}. + * + * The value of this field must be a time zone name from the tz database. + */ + timeZone?: string | ResetValue; + /** + * Settings that determine the retry behavior. + */ + retryConfig?: ScheduleRetryConfig; +} + +/** + * Configuration option for failure policy on background functions. + */ +export interface FailurePolicy { + /** + * Retry configuration. Must be an empty object. + * + */ + retry: Record; +} + +export const DEFAULT_FAILURE_POLICY: FailurePolicy = { + retry: {}, +}; + +export const MAX_NUMBER_USER_LABELS = 58; + +/** + * Configuration options for a function that applicable at runtime. + */ +export interface RuntimeOptions { + /** + * Which platform should host the backend. Valid options are "gcfv1" + * @internal + */ + platform?: "gcfv1"; + + /** + * Failure policy of the function, with boolean `true` being equivalent to + * providing an empty retry object. 
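// A Schedule value matching the interface above, together with the equivalent
// builder calls (cron string, time zone, and retry values are illustrative).
import * as functions from "firebase-functions/v1";
import type { Schedule } from "firebase-functions/v1";

export const nightly: Schedule = {
  schedule: "0 3 * * *", // crontab: every day at 03:00
  timeZone: "Europe/Berlin",
  retryConfig: { retryCount: 3, minBackoffDuration: "10s", maxDoublings: 4 },
};

export const nightlyCleanup = functions.pubsub
  .schedule(nightly.schedule)
  .timeZone("Europe/Berlin")
  .retryConfig({ retryCount: 3, minBackoffDuration: "10s", maxDoublings: 4 })
  .onRun(() => functions.logger.info("cleanup ran"));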
+ */ + failurePolicy?: FailurePolicy | boolean; + /** + * Amount of memory to allocate to the function. + */ + memory?: (typeof VALID_MEMORY_OPTIONS)[number] | Expression | ResetValue; + /** + * Timeout for the function in seconds, possible values are 0 to 540. + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of actual instances allowed to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: (typeof VPC_EGRESS_SETTINGS_OPTIONS)[number] | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: "default" | string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: (typeof INGRESS_SETTINGS_OPTIONS)[number] | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /** + * Invoker to set access control on https functions. + */ + invoker?: "public" | "private" | string | string[]; + + /* + * Secrets to bind to a function instance. + */ + secrets?: (string | SecretParam)[]; + + /** + * Determines whether Firebase AppCheck is enforced. + * + * @remarks + * When true, requests with invalid tokens autorespond with a 401 + * (Unauthorized) error. + * When false, requests with invalid tokens set context.app to undefiend. + */ + enforceAppCheck?: boolean; + + /** + * Determines whether Firebase App Check token is consumed on request. Defaults to false. + * + * @remarks + * Set this to true to enable the App Check replay protection feature by consuming the App Check token on callable + * request. Tokens that are found to be already consumed will have the `request.app.alreadyConsumed` property set + * to true. + * + * + * Tokens are only considered to be consumed if it is sent to the App Check service by setting this option to true. + * Other uses of the token do not consume it. + * + * This replay protection feature requires an additional network call to the App Check backend and forces the clients + * to obtain a fresh attestation from the chosen attestation providers. This can therefore negatively impact + * performance and can potentially deplete your attestation providers' quotas faster. Use this feature only for + * protecting low volume, security critical, or expensive operations. + * + * This option does not affect the `enforceAppCheck` option. Setting the latter to true will cause the callable function + * to automatically respond with a 401 Unauthorized status code when the request includes an invalid App Check token. + * When the request includes valid but consumed App Check tokens, requests will not be automatically rejected. Instead, + * the `request.app.alreadyConsumed` property will be set to true and pass the execution to the handler code for making + * further decisions, such as requiring additional security checks or rejecting the request. + */ + consumeAppCheckToken?: boolean; + + /** + * Controls whether function configuration modified outside of function source is preserved. Defaults to false. 
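// The App Check options above on a callable function: enforceAppCheck rejects
// requests with invalid tokens with a 401, and with consumeAppCheckToken a
// replayed token is assumed to surface as context.app.alreadyConsumed (handler
// logic is illustrative).
import * as functions from "firebase-functions/v1";

export const redeemCoupon = functions
  .runWith({ enforceAppCheck: true, consumeAppCheckToken: true })
  .https.onCall((data, context) => {
    if (context.app?.alreadyConsumed) {
      throw new functions.https.HttpsError(
        "failed-precondition",
        "This App Check token has already been used."
      );
    }
    return { redeemed: true };
  });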
+ * + * @remarks + * When setting configuration available in the underlying platform that is not yet available in the Firebase Functions + * SDK, we highly recommend setting `preserveExternalChanges` to `true`. Otherwise, when the Firebase Functions SDK releases + * a new version of the SDK with support for the missing configuration, your function's manually configured setting + * may inadvertently be wiped out. + */ + preserveExternalChanges?: boolean; +} + +/** + * Configuration options for a function that applies during function deployment. + */ +export interface DeploymentOptions extends RuntimeOptions { + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + /** + * Regions where function should be deployed. + */ + regions?: Array<(typeof SUPPORTED_REGIONS)[number] | string | Expression | ResetValue>; + /** + * Schedule for the scheduled function. + */ + schedule?: Schedule; +} diff --git a/src/v1/index.ts b/src/v1/index.ts new file mode 100644 index 000000000..7f3f9e10b --- /dev/null +++ b/src/v1/index.ts @@ -0,0 +1,63 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// Providers: +import * as logger from "../logger"; +import * as analytics from "./providers/analytics"; +import * as auth from "./providers/auth"; +import * as database from "./providers/database"; +import * as firestore from "./providers/firestore"; +import * as https from "./providers/https"; +import * as pubsub from "./providers/pubsub"; +import * as remoteConfig from "./providers/remoteConfig"; +import * as storage from "./providers/storage"; +import * as tasks from "./providers/tasks"; +import * as testLab from "./providers/testLab"; + +import { setApp as setEmulatedAdminApp } from "../common/app"; + +export { + analytics, + auth, + database, + firestore, + https, + pubsub, + remoteConfig, + storage, + tasks, + testLab, + logger, +}; + +export const app = { setEmulatedAdminApp }; + +// Exported root types: +export * from "./cloud-functions"; +export * from "./config"; +export * from "./function-builder"; +export * from "./function-configuration"; +// NOTE: Equivalent to `export * as params from "../params"` but api-extractor doesn't support that syntax. 
+import * as params from "../params"; +export { params }; + +export { onInit } from "../common/onInit"; diff --git a/src/providers/analytics.ts b/src/v1/providers/analytics.ts similarity index 55% rename from src/providers/analytics.ts rename to src/v1/providers/analytics.ts index 322b0d6e7..2168df487 100644 --- a/src/providers/analytics.ts +++ b/src/v1/providers/analytics.ts @@ -20,75 +20,77 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. -import { makeCloudFunction, CloudFunction, Event, EventContext } from '../cloud-functions'; -import * as _ from 'lodash'; +import { CloudFunction, Event, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; /** @internal */ -export const provider = 'google.analytics'; +export const provider = "google.analytics"; /** @internal */ -export const service = 'app-measurement.com'; +export const service = "app-measurement.com"; /** - * Registers a Cloud Function to handle analytics events. + * Registers a function to handle analytics events. * - * @param {string} analyticsEventType Name of the analytics event type to which + * @param analyticsEventType Name of the analytics event type to which * this Cloud Function is scoped. * - * @return {!functions.analytics.AnalyticsEventBuilder} Analytics event builder - * interface. + * @returns Analytics event builder interface. */ export function event(analyticsEventType: string) { + return _eventWithOptions(analyticsEventType, {}); +} + +/** @internal */ +export function _eventWithOptions(analyticsEventType: string, options: DeploymentOptions) { return new AnalyticsEventBuilder(() => { if (!process.env.GCLOUD_PROJECT) { - throw new Error('process.env.GCLOUD_PROJECT is not set.'); + throw new Error("process.env.GCLOUD_PROJECT is not set."); } - return 'projects/' + process.env.GCLOUD_PROJECT + '/events/' + analyticsEventType; - }); + return "projects/" + process.env.GCLOUD_PROJECT + "/events/" + analyticsEventType; + }, options); } /** * The Firebase Analytics event builder interface. * - * Access via [`functions.analytics.event()`](functions.analytics#event). + * Access via `functions.analytics.event()`. */ export class AnalyticsEventBuilder { - /** @internal */ - constructor(private triggerResource: () => string) { } + /** @hidden */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} /** * Event handler that fires every time a Firebase Analytics event occurs. * - * @param {!function(!functions.Event)} - * handler Event handler that fires every time a Firebase Analytics event + * @param handler Event handler that fires every time a Firebase Analytics event * occurs. * - * @return {!functions.CloudFunction} A - * Cloud Function you can export. + * @returns A function that you can export and deploy. */ onLog( - handler: (event: AnalyticsEvent, context: EventContext) => PromiseLike | any): CloudFunction { + handler: (event: AnalyticsEvent, context: EventContext) => PromiseLike | any + ): CloudFunction { const dataConstructor = (raw: Event) => { return new AnalyticsEvent(raw.data); }; return makeCloudFunction({ handler, provider, - eventType: 'event.log', + eventType: "event.log", service, legacyEventType: `providers/google.firebase.analytics/eventTypes/event.log`, triggerResource: this.triggerResource, dataConstructor, + options: this.options, }); } } -/** - * Interface representing a Firebase Analytics event that was logged for a specific user. 
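// The rewritten builder above in use; the event type name and logged fields are
// illustrative.
import * as functions from "firebase-functions/v1";

export const onPurchase = functions.analytics
  .event("in_app_purchase")
  .onLog((event, context) => {
    functions.logger.info("purchase logged", {
      user: event.user?.userId,
      valueInUSD: event.valueInUSD,
      reportingDate: event.reportingDate,
      eventId: context.eventId,
    });
  });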
- */ +/** Interface representing a Firebase Analytics event that was logged for a specific user. */ export class AnalyticsEvent { /** - * The date on which the event.was logged. - * (`YYYYMMDD` format in the registered timezone of your app). + * The date on which the event.was logged. + * (`YYYYMMDD` format in the registered timezone of your app). */ reportingDate: string; @@ -116,21 +118,38 @@ export class AnalyticsEvent { /** User-related dimensions. */ user?: UserDimensions; - /** @internal */ + /** @hidden */ constructor(wireFormat: any) { - this.params = {}; // In case of absent field, show empty (not absent) map. + this.params = {}; // In case of absent field, show empty (not absent) map. if (wireFormat.eventDim && wireFormat.eventDim.length > 0) { // If there's an eventDim, there'll always be exactly one. - let eventDim = wireFormat.eventDim[0]; - copyField(eventDim, this, 'name'); - copyField(eventDim, this, 'params', p => _.mapValues(p, unwrapValue)); - copyFieldTo(eventDim, this, 'valueInUsd', 'valueInUSD'); - copyFieldTo(eventDim, this, 'date', 'reportingDate'); - copyTimestampToString(eventDim, this, 'timestampMicros', 'logTime'); - copyTimestampToString(eventDim, this, 'previousTimestampMicros', 'previousLogTime'); + const eventDim = wireFormat.eventDim[0]; + copyField(eventDim, this, "name"); + copyField(eventDim, this, "params", (p) => mapKeys(p, unwrapValue)); + copyFieldTo(eventDim, this, "valueInUsd", "valueInUSD"); + copyFieldTo(eventDim, this, "date", "reportingDate"); + copyTimestampToString(eventDim, this, "timestampMicros", "logTime"); + copyTimestampToString(eventDim, this, "previousTimestampMicros", "previousLogTime"); } - copyFieldTo(wireFormat, this, 'userDim', 'user', dim => new UserDimensions(dim)); + copyFieldTo(wireFormat, this, "userDim", "user", (dim) => new UserDimensions(dim)); + } +} + +function isValidUserProperty(property: unknown): property is { value: unknown } { + if (property == null || typeof property !== "object" || !("value" in property)) { + return false; + } + + const { value } = property; + if (value == null) { + return false; } + + if (typeof value === "object" && Object.keys(value).length === 0) { + return false; + } + + return true; } /** @@ -153,7 +172,7 @@ export class UserDimensions { * A map of user properties set with the * [`setUserProperty`](https://firebase.google.com/docs/analytics/android/properties) API. * - * All values are [`UserPropertyValue`](functions.analytics.UserPropertyValue) objects. + * All values are [`UserPropertyValue`](providers_analytics_.userpropertyvalue) objects. */ userProperties: { [key: string]: UserPropertyValue }; @@ -169,85 +188,96 @@ export class UserDimensions { /** Information regarding the bundle in which these events were uploaded. */ bundleInfo: ExportBundleInfo; - /** @internal */ + /** @hidden */ constructor(wireFormat: any) { // These are interfaces or primitives, no transformation needed. - copyFields(wireFormat, this, ['userId', 'deviceInfo', 'geoInfo', 'appInfo']); + copyFields(wireFormat, this, ["userId", "deviceInfo", "geoInfo", "appInfo"]); // The following fields do need transformations of some sort. - copyTimestampToString(wireFormat, this, 'firstOpenTimestampMicros', 'firstOpenTime'); - this.userProperties = {}; // With no entries in the wire format, present an empty (as opposed to absent) map. 
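// The wire format the AnalyticsEvent constructor above consumes, with the boxed
// param values it unwraps (all values illustrative):
export const analyticsWireFormat = {
  eventDim: [
    {
      name: "level_up",
      date: "20230101",
      timestampMicros: "1672531200000000",
      valueInUsd: 1.99,
      params: {
        level: { intValue: "7" },           // unwraps to the number 7
        character: { stringValue: "mage" }, // unwraps to the string "mage"
      },
    },
  ],
  userDim: { userId: "alice" },
};
// new AnalyticsEvent(analyticsWireFormat) would expose roughly:
//   name === "level_up", reportingDate === "20230101", valueInUSD === 1.99,
//   logTime === "2023-01-01T00:00:00.000Z", params.level === 7,
//   params.character === "mage", user.userId === "alice"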
- copyField(wireFormat, this, 'userProperties', r => _.mapValues(r, p => new UserPropertyValue(p))); - copyField(wireFormat, this, 'bundleInfo', r => new ExportBundleInfo(r)); + copyTimestampToString(wireFormat, this, "firstOpenTimestampMicros", "firstOpenTime"); + this.userProperties = {}; // With no entries in the wire format, present an empty (as opposed to absent) map. + copyField(wireFormat, this, "userProperties", (r: unknown) => { + const entries = Object.entries(r as Record) + .filter(([, v]) => isValidUserProperty(v)) + .map(([k, v]) => [k, new UserPropertyValue(v)]); + return Object.fromEntries(entries); + }); + copyField(wireFormat, this, "bundleInfo", (r) => new ExportBundleInfo(r) as any); // BUG(36000368) Remove when no longer necessary /* tslint:disable:no-string-literal */ - if (!this.userId && this.userProperties['user_id']) { - this.userId = this.userProperties['user_id'].value; + if (!this.userId && this.userProperties["user_id"]) { + this.userId = this.userProperties["user_id"].value; } /* tslint:enable:no-string-literal */ } } -/** - * Predefined or custom properties stored on the client side. - */ +/** Predefined or custom properties stored on the client side. */ export class UserPropertyValue { - /** Last set value of a user property. */ + /** The last set value of a user property. */ value: string; /** UTC client time when the user property was last set. */ setTime: string; - /** @internal */ + /** @hidden */ constructor(wireFormat: any) { - copyField(wireFormat, this, 'value', unwrapValueAsString); - copyTimestampToString(wireFormat, this, 'setTimestampUsec', 'setTime'); + copyField(wireFormat, this, "value", unwrapValueAsString as any); + copyTimestampToString(wireFormat, this, "setTimestampUsec", "setTime"); } } /** - * Interface representing the device that triggered these Firebase Analytics events. + * Interface representing the device that triggered these + * Firebase Analytics events. */ export interface DeviceInfo { /** * Device category. + * * Examples: "tablet" or "mobile". */ deviceCategory?: string; /** * Device brand name. + * * Examples: "Samsung", "HTC" */ mobileBrandName?: string; /** * Device model name in human-readable format. + * * Example: "iPhone 7" */ mobileModelName?: string; /** * Device marketing name. + * * Example: "Galaxy S4 Mini" */ mobileMarketingName?: string; /** * Device model, as read from the OS. + * * Example: "iPhone9,1" */ deviceModel?: string; /** * Device OS version when data capture ended. + * * Example: "4.4.2" */ platformVersion?: string; /** * Vendor specific device identifier. This is IDFV on iOS. Not used for Android. + * * Example: '599F9C00-92DC-4B5C-9464-7971F01F8370' */ deviceId?: string; @@ -270,98 +300,149 @@ export interface DeviceInfo { /** * The time zone of the device when data was uploaded, as seconds skew from UTC. - * Use this to calculate the device's local time for [`event.timestamp`](functions.Event#timestamp)`. + * Use this to calculate the device's local time for + * [`EventContext.timestamp`](cloud_functions_eventcontext.html#timestamp). */ deviceTimeZoneOffsetSeconds: number; /** * The device's Limit Ad Tracking setting. * When `true`, you cannot use `resettableDeviceId` for remarketing, demographics or influencing ads serving - * behaviour. However, you can use resettableDeviceId for conversion tracking and campaign attribution. + * behaviour. However, you can use `resettableDeviceId` for conversion tracking and campaign attribution. 
*/ limitedAdTracking: boolean; } -/** - * Interface representing the geographic origin of the events. - */ +/** Interface representing the geographic origin of the events. */ export interface GeoInfo { - /** The geographic continent. Example: "Americas". */ + /** + * The geographic continent. + * + * Example: "South America". + */ continent?: string; - /** The geographic country. Example: "Brazil". */ + /** + * The geographic country. + * + * Example: "Brazil". + */ country?: string; - /** The geographic region. Example: "State of Sao Paulo". */ + /** + * The geographic region. + * + * Example: "State of Sao Paulo". + */ region?: string; - /** The geographic city. Example: "Sao Paulo". */ + /** + * The geographic city. + * + * Example: "Sao Paulo". + */ city?: string; } -/** - * Interface representing the application that triggered these events. - */ +/** Interface representing the application that triggered these events. */ export interface AppInfo { /** - * The app's version name. - * Examples: "1.0", "4.3.1.1.213361", "2.3 (1824253)", "v1.8b22p6". + * The app's version name. + * + * Examples: "1.0", "4.3.1.1.213361", "2.3 (1824253)", "v1.8b22p6". */ appVersion?: string; /** - * Unique id for this instance of the app. - * Example: "71683BF9FA3B4B0D9535A1F05188BAF3". + * Unique ID for this instance of the app. + * + * Example: "71683BF9FA3B4B0D9535A1F05188BAF3". */ appInstanceId: string; /** - * The identifier of the store that installed the app. - * Examples: "com.sec.android.app.samsungapps", "com.amazon.venezia", "com.nokia.nstore". + * The identifier of the store that installed the app. + * + * Examples: "com.sec.android.app.samsungapps", "com.amazon.venezia", "com.nokia.nstore". */ appStore?: string; - /** The app platform. Examples: "ANDROID", "IOS". */ + /** + * The app platform. + * + * Examples: "ANDROID", "IOS". + */ appPlatform: string; /** Unique application identifier within an app store. */ appId?: string; } -/** - * Interface representing the bundle in which these events were uploaded. - */ +/** Interface representing the bundle these events were uploaded to. */ export class ExportBundleInfo { - /** Monotonically increasing index for each bundle set by the Analytics SDK. */ + /** Monotonically increasing index for each bundle set by the Analytics SDK. */ bundleSequenceId: number; /** Timestamp offset (in milliseconds) between collection time and upload time. 
 */
  serverTimestampOffset: number;

-  /** @internal */
+  /** @hidden */
   constructor(wireFormat: any) {
-    copyField(wireFormat, this, 'bundleSequenceId');
-    copyTimestampToMillis(wireFormat, this, 'serverTimestampOffsetMicros', 'serverTimestampOffset');
+    copyField(wireFormat, this, "bundleSequenceId");
+    copyTimestampToMillis(wireFormat, this, "serverTimestampOffsetMicros", "serverTimestampOffset");
   }
 }

-function copyFieldTo<T, K extends keyof T>(
-  from: any, to: T, fromField: string, toField: K, transform: (val: any) => T[K] = _.identity): void {
-  if (from[fromField] !== undefined) {
+/** @hidden */
+function copyFieldTo<From, To, FromKey extends keyof From, ToKey extends keyof To>(
+  from: From,
+  to: To,
+  fromField: FromKey,
+  toField: ToKey,
+  transform?: (val: Required<From>[FromKey]) => Required<To>[ToKey]
+): void {
+  if (typeof from[fromField] === "undefined") {
+    return;
+  }
+  if (transform) {
     to[toField] = transform(from[fromField]);
+    return;
   }
+  to[toField] = from[fromField] as any;
 }

-function copyField<T, K extends keyof T>(from: any, to: T, field: K, transform: (val: any) => T[K] = _.identity): void {
+/** @hidden */
+function copyField<From, To, Key extends keyof From & keyof To>(
+  from: From,
+  to: To,
+  field: Key,
+  transform: (val: Required<From>[Key]) => Required<To>[Key] = (from) => from as any
+): void {
   copyFieldTo(from, to, field, field, transform);
 }

-function copyFields<T, K extends keyof T>(from: any, to: T, fields: K[]): void {
-  for (let field of fields) {
+/** @hidden */
+function copyFields<From, To, Key extends keyof From & keyof To>(
+  from: From,
+  to: To,
+  fields: Key[]
+): void {
+  for (const field of fields) {
     copyField(from, to, field);
   }
 }

+type TransformedObject<Obj, Transform extends (val: any) => any> = {
+  [key in keyof Obj]: ReturnType<Transform>;
+};
+function mapKeys<Obj extends object, Transform extends (val: any) => any>(
+  obj: Obj,
+  transform: Transform
+): TransformedObject<Obj, Transform> {
+  const entries = Object.entries(obj).map(([k, v]) => [k, transform(v)]);
+  return Object.fromEntries(entries);
+}
+
 // The incoming payload will have fields like:
 // {
 //   'myInt': {
@@ -391,10 +472,23 @@ function copyFields<T, K extends keyof T>(from: any, to: T, fields: K[]): void {
 // is due to the encoding library, which renders int64 values as strings to avoid loss of precision. This
 // method always returns a string, similarly to avoid loss of precision, unlike the less-conservative
 // 'unwrapValue' method just below.
-function unwrapValueAsString(wrapped: any): string {
-  let key: string = _.keys(wrapped)[0];
-  return _.toString(wrapped[key]);
+/** @hidden */
+function unwrapValueAsString(wrapped: unknown): string {
+  if (!wrapped || typeof wrapped !== "object") {
+    return "";
+  }
+  const keys = Object.keys(wrapped);
+  if (keys.length === 0) {
+    return "";
+  }
+  const key: string = keys[0];
+  const value = (wrapped as Record<string, unknown>)[key];
+  if (value === null || value === undefined) {
+    return "";
+  }
+  return value.toString();
 }
+
 // Ditto as the method above, but returning the values in the idiomatic JavaScript type (string for strings,
 // number for numbers):
 // {
@@ -408,27 +502,42 @@ function unwrapValueAsString(wrapped: any): string {
 // purposes can be divided into 'number' versus 'string'. This method will render all the numbers as
 // JavaScript's 'number' type, since we prefer using idiomatic types. Note that this may lead to loss
 // in precision for int64 fields, so use with care.
-const xValueNumberFields = ['intValue', 'floatValue', 'doubleValue'];
+/** @hidden */
+const xValueNumberFields = ["intValue", "floatValue", "doubleValue"];
+
+/** @hidden */
 function unwrapValue(wrapped: any): any {
-  let key: string = _.keys(wrapped)[0];
-  let value: string = unwrapValueAsString(wrapped);
-  return _.includes(xValueNumberFields, key) ?
_.toNumber(value) : value; + const key: string = Object.keys(wrapped)[0]; + const value: string = unwrapValueAsString(wrapped); + return xValueNumberFields.includes(key) ? Number(value) : value; } // The JSON payload delivers timestamp fields as strings of timestamps denoted in microseconds. // The JavaScript convention is to use numbers denoted in milliseconds. This method // makes it easy to convert a field of one type into the other. -function copyTimestampToMillis(from: any, to: T, fromName: string, toName: K) { +/** @hidden */ +function copyTimestampToMillis( + from: any, + to: T, + fromName: string, + toName: K +) { if (from[fromName] !== undefined) { - to[toName] = _.round(from[fromName] / 1000); + to[toName] = Math.round(from[fromName] / 1000) as any; } } // The JSON payload delivers timestamp fields as strings of timestamps denoted in microseconds. // In our SDK, we'd like to present timestamp as ISO-format strings. This method makes it easy // to convert a field of one type into the other. -function copyTimestampToString(from: any, to: T, fromName: string, toName: K) { +/** @hidden */ +function copyTimestampToString( + from: any, + to: T, + fromName: string, + toName: K +) { if (from[fromName] !== undefined) { - to[toName] = (new Date(from[fromName] / 1000)).toISOString(); + to[toName] = new Date(from[fromName] / 1000).toISOString() as any; } } diff --git a/src/v1/providers/auth.ts b/src/v1/providers/auth.ts new file mode 100644 index 000000000..e6990f495 --- /dev/null +++ b/src/v1/providers/auth.ts @@ -0,0 +1,258 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { + AuthBlockingEventType, + AuthEventContext, + AuthUserRecord, + BeforeCreateResponse, + BeforeEmailResponse, + BeforeSignInResponse, + BeforeSmsResponse, + HandlerV1, + HttpsError, + MaybeAsync, + UserInfo, + UserRecord, + userRecordConstructor, + UserRecordMetadata, + wrapHandler, +} from "../../common/providers/identity"; +import { + BlockingFunction, + CloudFunction, + Event, + EventContext, + makeCloudFunction, + optionsToEndpoint, + optionsToTrigger, +} from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; +import { initV1Endpoint } from "../../runtime/manifest"; + +// TODO: yank in next breaking change release +export { UserRecordMetadata, userRecordConstructor }; +export type { UserRecord, UserInfo }; + +export { HttpsError }; + +/** @internal */ +export const provider = "google.firebase.auth"; +/** @internal */ +export const service = "firebaseauth.googleapis.com"; + +/** + * Options for Auth blocking function. + */ +export interface UserOptions { + /** Options to set configuration at the resource level for blocking functions. */ + blockingOptions?: { + /** Pass the ID Token credential to the function. */ + idToken?: boolean; + + /** Pass the Access Token credential to the function. */ + accessToken?: boolean; + + /** Pass the Refresh Token credential to the function. */ + refreshToken?: boolean; + }; +} + +/** + * Handles events related to Firebase Auth users events. + * + * @param userOptions - Resource level options + * @returns UserBuilder - Builder used to create functions for Firebase Auth user lifecycle events + * + * @public + */ +export function user(userOptions?: UserOptions): UserBuilder { + return _userWithOptions({}, userOptions || {}); +} + +/** @internal */ +export function _userWithOptions(options: DeploymentOptions, userOptions: UserOptions) { + return new UserBuilder( + () => { + if (!process.env.GCLOUD_PROJECT) { + throw new Error("process.env.GCLOUD_PROJECT is not set."); + } + return "projects/" + process.env.GCLOUD_PROJECT; + }, + options, + userOptions + ); +} + +/** + * Builder used to create functions for Firebase Auth user lifecycle events. + * @public + */ +export class UserBuilder { + private static dataConstructor(raw: Event): UserRecord { + return userRecordConstructor(raw.data); + } + + /* @internal */ + constructor( + private triggerResource: () => string, + private options: DeploymentOptions, + private userOptions?: UserOptions + ) {} + + /** + * Responds to the creation of a Firebase Auth user. + * + * @param handler Event handler that responds to the creation of a Firebase Auth user. + * + * @public + */ + onCreate( + handler: (user: UserRecord, context: EventContext) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "user.create"); + } + + /** + * Responds to the deletion of a Firebase Auth user. + * + * @param handler Event handler that responds to the deletion of a Firebase Auth user. + * + * @public + */ + onDelete( + handler: (user: UserRecord, context: EventContext) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "user.delete"); + } + + /** + * Blocks request to create a Firebase Auth user. + * + * @param handler Event handler that blocks creation of a Firebase Auth user. + * + * @public + */ + beforeCreate( + handler: ( + user: AuthUserRecord, + context: AuthEventContext + ) => MaybeAsync + ): BlockingFunction { + return this.beforeOperation(handler, "beforeCreate"); + } + + /** + * Blocks request to sign-in a Firebase Auth user. 
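   *
   * @example
   * A minimal sketch, assuming the v1 API is available as `functions`
   * (for example `import * as functions from "firebase-functions/v1"`):
   *
   *   export const checkSignIn = functions.auth.user().beforeSignIn((user, context) => {
   *     // Block sign-in for accounts whose email is not yet verified.
   *     if (user.email && !user.emailVerified) {
   *       throw new functions.auth.HttpsError("permission-denied", "Please verify your email first.");
   *     }
   *   });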
+ * + * @param handler Event handler that blocks sign-in of a Firebase Auth user. + * + * @public + */ + beforeSignIn( + handler: ( + user: AuthUserRecord, + context: AuthEventContext + ) => MaybeAsync + ): BlockingFunction { + return this.beforeOperation(handler, "beforeSignIn"); + } + + beforeEmail( + handler: (context: AuthEventContext) => MaybeAsync + ): BlockingFunction { + return this.beforeOperation(handler, "beforeSendEmail"); + } + + beforeSms( + handler: (context: AuthEventContext) => MaybeAsync + ): BlockingFunction { + return this.beforeOperation(handler, "beforeSendSms"); + } + + private onOperation( + handler: (user: UserRecord, context: EventContext) => PromiseLike | any, + eventType: string + ): CloudFunction { + return makeCloudFunction({ + handler, + provider, + eventType, + service, + triggerResource: this.triggerResource, + // eslint-disable-next-line @typescript-eslint/unbound-method + dataConstructor: UserBuilder.dataConstructor, + legacyEventType: `providers/firebase.auth/eventTypes/${eventType}`, + options: this.options, + }); + } + + private beforeOperation(handler: HandlerV1, eventType: AuthBlockingEventType): BlockingFunction { + const accessToken = this.userOptions?.blockingOptions?.accessToken || false; + const idToken = this.userOptions?.blockingOptions?.idToken || false; + const refreshToken = this.userOptions?.blockingOptions?.refreshToken || false; + + const annotatedHandler = Object.assign(handler, { platform: "gcfv1" as const }); + const func: any = wrapHandler(eventType, annotatedHandler); + + const legacyEventType = `providers/cloud.auth/eventTypes/user.${eventType}`; + + func.__trigger = { + labels: {}, + ...optionsToTrigger(this.options), + blockingTrigger: { + eventType: legacyEventType, + options: { + accessToken, + idToken, + refreshToken, + }, + }, + }; + + func.__endpoint = { + platform: "gcfv1", + labels: {}, + ...initV1Endpoint(this.options), + ...optionsToEndpoint(this.options), + blockingTrigger: { + eventType: legacyEventType, + options: { + accessToken, + idToken, + refreshToken, + }, + }, + }; + + func.__requiredAPIs = [ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]; + + func.run = handler; + + return func; + } +} diff --git a/src/v1/providers/database.ts b/src/v1/providers/database.ts new file mode 100644 index 000000000..d59d7de19 --- /dev/null +++ b/src/v1/providers/database.ts @@ -0,0 +1,323 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { getApp } from "../../common/app"; +import { Change } from "../../common/change"; +import { firebaseConfig } from "../../common/config"; +import { ParamsOf } from "../../common/params"; +import { DataSnapshot } from "../../common/providers/database"; +import { normalizePath } from "../../common/utilities/path"; +import { applyChange } from "../../common/utilities/utils"; +import { CloudFunction, Event, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; + +export { DataSnapshot }; + +/** @internal */ +export const provider = "google.firebase.database"; +/** @internal */ +export const service = "firebaseio.com"; + +const databaseURLRegex = new RegExp("^https://([^.]+)."); +const emulatorDatabaseURLRegex = new RegExp("^http://.*ns=([^&]+)"); + +/** + * Registers a function that triggers on events from a specific + * Firebase Realtime Database instance. + * + * @remarks + * Use this method together with `ref` to specify the instance on which to + * watch for database events. For example: `firebase.database.instance('my-app-db-2').ref('/foo/bar')` + * + * Note that `functions.database.ref` used without `instance` watches the + * *default* instance for events. + * + * @param instance The instance name of the database instance + * to watch for write events. + * @returns Firebase Realtime Database instance builder interface. + */ +export function instance(instance: string) { + return _instanceWithOptions(instance, {}); +} + +/** + * Registers a function that triggers on Firebase Realtime Database write + * events. + * + * @remarks + * This method behaves very similarly to the method of the same name in the + * client and Admin Firebase SDKs. Any change to the Database that affects the + * data at or below the provided `path` will fire an event in Cloud Functions. + * + * There are three important differences between listening to a Realtime + * Database event in Cloud Functions and using the Realtime Database in the + * client and Admin SDKs: + * + * 1. Cloud Functions allows wildcards in the `path` name. Any `path` component + * in curly brackets (`{}`) is a wildcard that matches all strings. The value + * that matched a certain invocation of a Cloud Function is returned as part + * of the [`EventContext.params`](cloud_functions_eventcontext.html#params object. For + * example, `ref("messages/{messageId}")` matches changes at + * `/messages/message1` or `/messages/message2`, resulting in + * `event.params.messageId` being set to `"message1"` or `"message2"`, + * respectively. + * + * 2. Cloud Functions do not fire an event for data that already existed before + * the Cloud Function was deployed. + * + * 3. Cloud Function events have access to more information, including a + * snapshot of the previous event data and information about the user who + * triggered the Cloud Function. + * + * @param path The path within the Database to watch for write events. + * @returns Firebase Realtime Database builder interface. 
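 *
 * @example
 * A short sketch of typical usage, assuming the v1 API is imported as
 * `functions` (e.g. `import * as functions from "firebase-functions/v1"`);
 * the `/messages/{messageId}` path is illustrative:
 *
 *   export const logMessageChange = functions.database
 *     .ref("/messages/{messageId}")
 *     .onWrite((change, context) => {
 *       // The wildcard value is surfaced on context.params.
 *       console.log(`message ${context.params.messageId} is now`, change.after.val());
 *       return null;
 *     });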
+ */ +export function ref(path: Ref) { + return _refWithOptions(path, {}); +} + +/** @internal */ +export function _instanceWithOptions( + instance: string, + options: DeploymentOptions +): InstanceBuilder { + return new InstanceBuilder(instance, options); +} + +/** + * The Firebase Realtime Database instance builder interface. + * + * Access via [`database.instance()`](providers_database_.html#instance). + */ +export class InstanceBuilder { + constructor(private instance: string, private options: DeploymentOptions) {} + + /** + * @returns Firebase Realtime Database reference builder interface. + */ + ref(path: Ref): RefBuilder { + const normalized = normalizePath(path); + return new RefBuilder( + () => `projects/_/instances/${this.instance}/refs/${normalized}`, + this.options + ); + } +} + +/** @internal */ +export function _refWithOptions( + path: Ref, + options: DeploymentOptions +): RefBuilder { + const resourceGetter = () => { + const normalized = normalizePath(path); + const databaseURL = firebaseConfig().databaseURL; + if (!databaseURL) { + throw new Error( + "Missing expected firebase config value databaseURL, " + + "config is actually" + + JSON.stringify(firebaseConfig()) + + "\n If you are unit testing, please set process.env.FIREBASE_CONFIG" + ); + } + + let instance; + const prodMatch = databaseURL.match(databaseURLRegex); + if (prodMatch) { + instance = prodMatch[1]; + } else { + const emulatorMatch = databaseURL.match(emulatorDatabaseURLRegex); + if (emulatorMatch) { + instance = emulatorMatch[1]; + } + } + + if (!instance) { + throw new Error("Invalid value for config firebase.databaseURL: " + databaseURL); + } + + return `projects/_/instances/${instance}/refs/${normalized}`; + }; + + return new RefBuilder(resourceGetter, options); +} + +/** + * The Firebase Realtime Database reference builder interface. + * + * Access via [`functions.database.ref()`](functions.database#.ref). + */ +export class RefBuilder { + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + /** + * Event handler that fires every time a Firebase Realtime Database write + * of any kind (creation, update, or delete) occurs. + * + * @param handler Event handler that runs every time a Firebase Realtime Database + * write occurs. + * @returns A function that you can export and deploy. + */ + onWrite( + handler: ( + change: Change, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction> { + return this.onOperation(handler, "ref.write", this.changeConstructor); + } + + /** + * Event handler that fires every time data is updated in + * Firebase Realtime Database. + * + * @param handler Event handler which is run every time a Firebase Realtime Database + * write occurs. + * @returns A function which you can export and deploy. + */ + onUpdate( + handler: ( + change: Change, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction> { + return this.onOperation(handler, "ref.update", this.changeConstructor); + } + + /** + * Event handler that fires every time new data is created in + * Firebase Realtime Database. + * + * @param handler Event handler that runs every time new data is created in + * Firebase Realtime Database. + * @returns A function that you can export and deploy. 
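 *
 * @example
 * A minimal sketch for this handler, assuming `functions` is the v1 entry
 * point and the `/messages/{id}/original` path is purely illustrative:
 *
 *   export const makeUppercase = functions.database
 *     .ref("/messages/{id}/original")
 *     .onCreate((snapshot, context) => {
 *       const original = snapshot.val() as string;
 *       // Write the transformed value next to the original node.
 *       return snapshot.ref.parent?.child("uppercase").set(original.toUpperCase());
 *     });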
+ */ + onCreate( + handler: ( + snapshot: DataSnapshot, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction { + const dataConstructor = (raw: Event) => { + const [dbInstance, path] = extractInstanceAndPath( + raw.context.resource.name, + raw.context.domain + ); + return new DataSnapshot(raw.data.delta, path, getApp(), dbInstance); + }; + return this.onOperation(handler, "ref.create", dataConstructor); + } + + /** + * Event handler that fires every time data is deleted from + * Firebase Realtime Database. + * + * @param handler Event handler that runs every time data is deleted from + * Firebase Realtime Database. + * @returns A function that you can export and deploy. + */ + onDelete( + handler: ( + snapshot: DataSnapshot, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction { + const dataConstructor = (raw: Event) => { + const [dbInstance, path] = extractInstanceAndPath( + raw.context.resource.name, + raw.context.domain + ); + return new DataSnapshot(raw.data.data, path, getApp(), dbInstance); + }; + return this.onOperation(handler, "ref.delete", dataConstructor); + } + + private onOperation( + handler: (data: T, context: EventContext) => PromiseLike | any, + eventType: string, + dataConstructor: (raw: Event | Event) => any + ): CloudFunction { + return makeCloudFunction({ + handler, + provider, + service, + eventType, + legacyEventType: `providers/${provider}/eventTypes/${eventType}`, + triggerResource: this.triggerResource, + dataConstructor, + options: this.options, + }); + } + + private changeConstructor = (raw: Event): Change => { + const [dbInstance, path] = extractInstanceAndPath( + raw.context.resource.name, + raw.context.domain + ); + const before = new DataSnapshot(raw.data.data, path, getApp(), dbInstance); + const after = new DataSnapshot( + applyChange(raw.data.data, raw.data.delta), + path, + getApp(), + dbInstance + ); + return { + before, + after, + }; + }; +} + +const resourceRegex = /^projects\/([^/]+)\/instances\/([a-zA-Z0-9-]+)\/refs(\/.+)?/; + +/** + * Utility function to extract database reference from resource string + * + * @param optional database domain override for the original of the source database. + * It defaults to `firebaseio.com`. + * Multi-region RTDB will be served from different domains. + * Since region is not part of the resource name, it is provided through context. + * + * @internal + */ +export function extractInstanceAndPath(resource: string, domain = "firebaseio.com") { + const match = resource.match(new RegExp(resourceRegex)); + if (!match) { + throw new Error( + `Unexpected resource string for Firebase Realtime Database event: ${resource}. ` + + 'Expected string in the format of "projects/_/instances/{firebaseioSubdomain}/refs/{ref=**}"' + ); + } + const [, project, dbInstanceName, path] = match; + if (project !== "_") { + throw new Error(`Expect project to be '_' in a Firebase Realtime Database event`); + } + + const emuHost = process.env.FIREBASE_DATABASE_EMULATOR_HOST; + if (emuHost) { + const dbInstance = `http://${emuHost}/?ns=${dbInstanceName}`; + return [dbInstance, path]; + } else { + const dbInstance = "https://" + dbInstanceName + "." 
+ domain; + return [dbInstance, path]; + } +} diff --git a/src/v1/providers/firestore.ts b/src/v1/providers/firestore.ts new file mode 100644 index 000000000..00ee71114 --- /dev/null +++ b/src/v1/providers/firestore.ts @@ -0,0 +1,207 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as firestore from "firebase-admin/firestore"; + +import { posix } from "path"; +import { Change } from "../../common/change"; +import { ParamsOf } from "../../common/params"; +import { + createBeforeSnapshotFromJson, + createSnapshotFromJson, +} from "../../common/providers/firestore"; +import { CloudFunction, Event, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; + +/** @internal */ +export const provider = "google.firestore"; +/** @internal */ +export const service = "firestore.googleapis.com"; +/** @internal */ +export const defaultDatabase = "(default)"; + +export type DocumentSnapshot = firestore.DocumentSnapshot; +export type QueryDocumentSnapshot = firestore.QueryDocumentSnapshot; + +/** + * Select the Firestore document to listen to for events. + * @param path Full database path to listen to. This includes the name of + * the collection that the document is a part of. For example, if the + * collection is named "users" and the document is named "Ada", then the + * path is "/users/Ada". + */ +export function document(path: Path) { + return _documentWithOptions(path, {}); +} + +// Multiple namespaces are not yet supported by Firestore. +export function namespace(namespace: string) { + return _namespaceWithOptions(namespace, {}); +} + +// Multiple databases are not yet supported by Firestore. 
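// An illustrative sketch of the common entry point above (`document()`), assuming
// the v1 API is imported as `functions`; the "users/{userId}" path is an example only:
//
export const onUserCreated = functions.firestore
  .document("users/{userId}")
  .onCreate((snapshot, context) => {
    console.log(`user ${context.params.userId} created with`, snapshot.data());
    return null;
  });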
+export function database(database: string) { + return _databaseWithOptions(database, {}); +} + +/** @internal */ +export function _databaseWithOptions( + database: string = defaultDatabase, + options: DeploymentOptions +) { + return new DatabaseBuilder(database, options); +} + +/** @internal */ +export function _namespaceWithOptions(namespace: string, options: DeploymentOptions) { + return _databaseWithOptions(defaultDatabase, options).namespace(namespace); +} + +/** @internal */ +export function _documentWithOptions(path: Path, options: DeploymentOptions) { + return _databaseWithOptions(defaultDatabase, options).document(path); +} + +export class DatabaseBuilder { + constructor(private database: string, private options: DeploymentOptions) {} + + namespace(namespace: string) { + return new NamespaceBuilder(this.database, this.options, namespace); + } + + document(path: Path) { + return new NamespaceBuilder(this.database, this.options).document(path); + } +} + +export class NamespaceBuilder { + constructor( + private database: string, + private options: DeploymentOptions, + private namespace?: string + ) {} + + document(path: Path) { + return new DocumentBuilder(() => { + if (!process.env.GCLOUD_PROJECT) { + throw new Error("process.env.GCLOUD_PROJECT is not set."); + } + const database = posix.join( + "projects", + process.env.GCLOUD_PROJECT, + "databases", + this.database + ); + return posix.join( + database, + this.namespace ? `documents@${this.namespace}` : "documents", + path + ); + }, this.options); + } +} + +export function snapshotConstructor(event: Event): DocumentSnapshot { + return createSnapshotFromJson( + event.data, + event.context.resource.name, + event?.data?.value?.readTime, + event?.data?.value?.updateTime + ); +} + +// TODO remove this function when wire format changes to new format +export function beforeSnapshotConstructor(event: Event): DocumentSnapshot { + return createBeforeSnapshotFromJson( + event.data, + event.context.resource.name, + event?.data?.oldValue?.readTime, + undefined + ); +} + +function changeConstructor(raw: Event) { + return Change.fromObjects(beforeSnapshotConstructor(raw), snapshotConstructor(raw)); +} + +export class DocumentBuilder { + constructor(private triggerResource: () => string, private options: DeploymentOptions) { + // TODO what validation do we want to do here? + } + + /** Respond to all document writes (creates, updates, or deletes). */ + onWrite( + handler: ( + change: Change, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction> { + return this.onOperation(handler, "document.write", changeConstructor); + } + + /** Respond only to document updates. */ + onUpdate( + handler: ( + change: Change, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction> { + return this.onOperation(handler, "document.update", changeConstructor); + } + + /** Respond only to document creations. */ + onCreate( + handler: ( + snapshot: QueryDocumentSnapshot, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "document.create", snapshotConstructor); + } + + /** Respond only to document deletions. 
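 *
 * @example
 * A brief sketch, assuming `functions` is the v1 entry point and the
 * "posts" collection is illustrative:
 *
 *   export const onPostDeleted = functions.firestore
 *     .document("posts/{postId}")
 *     .onDelete((snapshot, context) => {
 *       // The snapshot holds the document contents as they were just before deletion.
 *       console.log(`post ${context.params.postId} deleted:`, snapshot.data());
 *       return null;
 *     });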
*/ + onDelete( + handler: ( + snapshot: QueryDocumentSnapshot, + context: EventContext> + ) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "document.delete", beforeSnapshotConstructor); + } + + private onOperation( + handler: (data: T, context: EventContext>) => PromiseLike | any, + eventType: string, + dataConstructor: (raw: Event) => any + ): CloudFunction { + return makeCloudFunction({ + handler, + provider: provider, + eventType, + service: service, + triggerResource: this.triggerResource, + legacyEventType: `providers/cloud.firestore/eventTypes/${eventType}`, + dataConstructor, + options: this.options, + }); + } +} diff --git a/src/v1/providers/https.ts b/src/v1/providers/https.ts new file mode 100644 index 000000000..739c9e001 --- /dev/null +++ b/src/v1/providers/https.ts @@ -0,0 +1,142 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as express from "express"; + +import { convertIfPresent, convertInvoker } from "../../common/encoding"; +import { + CallableContext, + FunctionsErrorCode, + HttpsError, + onCallHandler, + Request, + withErrorHandler, +} from "../../common/providers/https"; +import { HttpsFunction, optionsToEndpoint, optionsToTrigger, Runnable } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; +import { initV1Endpoint } from "../../runtime/manifest"; +import { withInit } from "../../common/onInit"; +import { wrapTraceContext } from "../../v2/trace"; + +export { HttpsError }; +export type { Request, CallableContext, FunctionsErrorCode }; + +/** + * Handle HTTP requests. + * @param handler A function that takes a request and response object, + * same signature as an Express app. + */ +export function onRequest( + handler: (req: Request, resp: express.Response) => void | Promise +): HttpsFunction { + return _onRequestWithOptions(handler, {}); +} + +/** + * Declares a callable method for clients to call using a Firebase SDK. + * @param handler A method that takes a data and context and returns a value. 
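 *
 * @example
 * A minimal callable sketch, assuming `functions` is the v1 entry point; the
 * "addNumbers" semantics are illustrative only:
 *
 *   export const addNumbers = functions.https.onCall((data, context) => {
 *     if (typeof data?.a !== "number" || typeof data?.b !== "number") {
 *       throw new functions.https.HttpsError("invalid-argument", "a and b must be numbers");
 *     }
 *     return { sum: data.a + data.b };
 *   });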
+ */ +export function onCall( + handler: (data: any, context: CallableContext) => any | Promise +): HttpsFunction & Runnable { + return _onCallWithOptions(handler, {}); +} + +/** @internal */ +export function _onRequestWithOptions( + handler: (req: Request, resp: express.Response) => void | Promise, + options: DeploymentOptions +): HttpsFunction { + // lets us add __endpoint without altering handler: + const cloudFunction: any = (req: Request, res: express.Response) => { + return wrapTraceContext(withInit(withErrorHandler(handler)))(req, res); + }; + cloudFunction.__trigger = { + ...optionsToTrigger(options), + httpsTrigger: {}, + }; + convertIfPresent( + cloudFunction.__trigger.httpsTrigger, + options, + "invoker", + "invoker", + convertInvoker + ); + // TODO parse the options + + cloudFunction.__endpoint = { + platform: "gcfv1", + ...initV1Endpoint(options), + ...optionsToEndpoint(options), + httpsTrigger: {}, + }; + convertIfPresent( + cloudFunction.__endpoint.httpsTrigger, + options, + "invoker", + "invoker", + convertInvoker + ); + return cloudFunction; +} + +/** @internal */ +export function _onCallWithOptions( + handler: (data: any, context: CallableContext) => any | Promise, + options: DeploymentOptions +): HttpsFunction & Runnable { + // fix the length of handler to make the call to handler consistent + // in the onCallHandler + const fixedLen = (data: any, context: CallableContext) => { + return withInit(handler)(data, context); + }; + const func: any = wrapTraceContext( + onCallHandler( + { + enforceAppCheck: options.enforceAppCheck, + consumeAppCheckToken: options.consumeAppCheckToken, + cors: { origin: true, methods: "POST" }, + }, + fixedLen, + "gcfv1" + ) + ); + + func.__trigger = { + labels: {}, + ...optionsToTrigger(options), + httpsTrigger: {}, + }; + func.__trigger.labels["deployment-callable"] = "true"; + + func.__endpoint = { + platform: "gcfv1", + labels: {}, + ...initV1Endpoint(options), + ...optionsToEndpoint(options), + callableTrigger: {}, + }; + + func.run = fixedLen; + + return func; +} diff --git a/src/v1/providers/pubsub.ts b/src/v1/providers/pubsub.ts new file mode 100644 index 000000000..57a28803c --- /dev/null +++ b/src/v1/providers/pubsub.ts @@ -0,0 +1,206 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
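// A short usage sketch for the builders defined below, assuming the v1 API is
// imported as `functions`; the topic name, schedule string, and time zone are illustrative:
//
export const onMessage = functions.pubsub.topic("my-topic").onPublish((message) => {
  // message.json lazily base64-decodes and JSON-parses the payload.
  console.log("payload:", message.json);
});

export const tick = functions.pubsub
  .schedule("every 5 minutes")
  .timeZone("America/New_York")
  .onRun((context) => {
    console.log("scheduled run at", context.timestamp);
    return null;
  });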
+ +import { CloudFunction, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions, ScheduleRetryConfig } from "../function-configuration"; + +/** @internal */ +export const provider = "google.pubsub"; +/** @internal */ +export const service = "pubsub.googleapis.com"; + +/** + * Registers a Cloud Function triggered when a Google Cloud Pub/Sub message + * is sent to a specified topic. + * + * @param topic - The Pub/Sub topic to watch for message events. + * @returns Pub/Sub topic builder interface. + */ +export function topic(topic: string) { + return _topicWithOptions(topic, {}); +} + +/** @internal */ +export function _topicWithOptions(topic: string, options: DeploymentOptions): TopicBuilder { + if (topic.indexOf("/") !== -1) { + throw new Error("Topic name may not have a /"); + } + + return new TopicBuilder(() => { + if (!process.env.GCLOUD_PROJECT) { + throw new Error("process.env.GCLOUD_PROJECT is not set."); + } + return `projects/${process.env.GCLOUD_PROJECT}/topics/${topic}`; + }, options); +} + +/** + * The Google Cloud Pub/Sub topic builder. + * + * Access via `functions.pubsub.topic()`. + */ +export class TopicBuilder { + /** @hidden */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + /** + * Event handler that fires every time a Cloud Pub/Sub message is + * published. + * + * @param handler - Event handler that runs every time a Cloud Pub/Sub message + * is published. + * @returns A function that you can export and deploy. + */ + onPublish( + handler: (message: Message, context: EventContext) => PromiseLike | any + ): CloudFunction { + return makeCloudFunction({ + handler, + provider, + service, + triggerResource: this.triggerResource, + eventType: "topic.publish", + dataConstructor: (raw) => new Message(raw.data), + options: this.options, + }); + } +} + +/** + * Registers a Cloud Function to run at specified times. + * + * @param schedule - The schedule, in Unix Crontab or AppEngine syntax. + * @returns ScheduleBuilder interface. + */ +export function schedule(schedule: string): ScheduleBuilder { + return _scheduleWithOptions(schedule, {}); +} + +/** @internal */ +export function _scheduleWithOptions( + schedule: string, + options: DeploymentOptions +): ScheduleBuilder { + const triggerResource = () => { + if (!process.env.GCLOUD_PROJECT) { + throw new Error("process.env.GCLOUD_PROJECT is not set."); + } + // The CLI will append the correct topic name based on region and function name + return `projects/${process.env.GCLOUD_PROJECT}/topics`; + }; + return new ScheduleBuilder(triggerResource, { + ...options, + schedule: { schedule }, + }); +} + +/** + * The builder for scheduled functions, which are powered by + * Google Pub/Sub and Cloud Scheduler. Describes the Cloud Scheduler + * job that is deployed to trigger a scheduled function at the provided + * frequency. For more information, see + * [Schedule functions](/docs/functions/schedule-functions). + * + * Access via `functions.pubsub.schedule()`. + */ +export class ScheduleBuilder { + /** @hidden */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + retryConfig(config: ScheduleRetryConfig): ScheduleBuilder { + this.options.schedule.retryConfig = config; + return this; + } + + timeZone(timeZone: string): ScheduleBuilder { + this.options.schedule.timeZone = timeZone; + return this; + } + + /** + * Event handler for scheduled functions. 
Triggered whenever the associated + * scheduler job sends a Pub/Sub message. + * + * @param handler - Handler that fires whenever the associated + * scheduler job sends a Pub/Sub message. + * @returns A function that you can export and deploy. + */ + onRun(handler: (context: EventContext) => PromiseLike | any) { + const cloudFunction = makeCloudFunction({ + contextOnlyHandler: handler, + provider, + service, + triggerResource: this.triggerResource, + eventType: "topic.publish", + options: this.options, + labels: { "deployment-scheduled": "true" }, + }); + return cloudFunction; + } +} + +/** + * Interface representing a Google Cloud Pub/Sub message. + * + * @param data - Payload of a Pub/Sub message. + */ +export class Message { + /** + * The data payload of this message object as a base64-encoded string. + */ + readonly data: string; + + /** + * User-defined attributes published with the message, if any. + */ + readonly attributes: { [key: string]: string }; + + /** @hidden */ + private _json: any; + + constructor(data: any) { + [this.data, this.attributes, this._json] = [data.data, data.attributes || {}, data.json]; + } + + /** + * The JSON data payload of this message object, if any. + */ + get json(): any { + if (typeof this._json === "undefined") { + this._json = JSON.parse(Buffer.from(this.data, "base64").toString("utf8")); + } + + return this._json; + } + + /** + * Returns a JSON-serializable representation of this object. + * + * @returns A JSON-serializable representation of this object. + */ + toJSON(): any { + return { + data: this.data, + attributes: this.attributes, + }; + } +} diff --git a/src/v1/providers/remoteConfig.ts b/src/v1/providers/remoteConfig.ts new file mode 100644 index 000000000..cf67383dc --- /dev/null +++ b/src/v1/providers/remoteConfig.ts @@ -0,0 +1,141 @@ +// The MIT License (MIT) +// +// Copyright (c) 2018 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { CloudFunction, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; + +/** @internal */ +export const provider = "google.firebase.remoteconfig"; +/** @internal */ +export const service = "firebaseremoteconfig.googleapis.com"; + +/** + * Registers a function that triggers on Firebase Remote Config template + * update events. + * + * @param handler A function that takes the updated Remote Config + * template version metadata as an argument. 
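 *
 * @example
 * A minimal sketch, assuming `functions` is the v1 entry point:
 *
 *   export const onConfigUpdate = functions.remoteConfig.onUpdate((version, context) => {
 *     console.log(
 *       `Remote Config updated to version ${version.versionNumber} by ${version.updateUser.email}`
 *     );
 *     return null;
 *   });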
+ * + * @returns A function that you can export and deploy. + */ +export function onUpdate( + handler: (version: TemplateVersion, context: EventContext) => PromiseLike | any +): CloudFunction { + return _onUpdateWithOptions(handler, {}); +} + +/** @internal */ +export function _onUpdateWithOptions( + handler: (version: TemplateVersion, context: EventContext) => PromiseLike | any, + options: DeploymentOptions +): CloudFunction { + const triggerResource = () => { + if (!process.env.GCLOUD_PROJECT) { + throw new Error("process.env.GCLOUD_PROJECT is not set."); + } + return `projects/${process.env.GCLOUD_PROJECT}`; + }; + return new UpdateBuilder(triggerResource, options).onUpdate(handler); +} + +/** Builder used to create Cloud Functions for Remote Config. */ +export class UpdateBuilder { + /** @internal */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + /** + * Handle all updates (including rollbacks) that affect a Remote Config + * project. + * @param handler A function that takes the updated Remote Config template + * version metadata as an argument. + */ + onUpdate( + handler: (version: TemplateVersion, context: EventContext) => PromiseLike | any + ): CloudFunction { + return makeCloudFunction({ + handler, + provider, + service, + triggerResource: this.triggerResource, + eventType: "update", + options: this.options, + }); + } +} + +/** + * An interface representing a Remote Config template version metadata object + * emitted when a project is updated. + */ +export interface TemplateVersion { + /** The version number of the updated Remote Config template. */ + versionNumber: number; + + /** When the template was updated in format (ISO8601 timestamp). */ + updateTime: string; + + /** + * Metadata about the account that performed the update, of + * type [`RemoteConfigUser`](/docs/reference/remote-config/rest/v1/Version#remoteconfiguser). + */ + updateUser: RemoteConfigUser; + + /** A description associated with this Remote Config template version. */ + description: string; + + /** + * The origin of the caller - either the Firebase console or the Remote Config + * REST API. See [`RemoteConfigUpdateOrigin`](/docs/reference/remote-config/rest/v1/Version#remoteconfigupdateorigin) + * for valid values. + */ + updateOrigin: string; + + /** + * The type of update action that was performed, whether forced, + * incremental, or a rollback operation. See + * [`RemoteConfigUpdateType`](/docs/reference/remote-config/rest/v1/Version#remoteconfigupdatetype) + * for valid values. + */ + updateType: string; + + /** + * The version number of the Remote Config template that this update rolled back to. + * Only applies if this update was a rollback. + */ + rollbackSource?: number; +} + +/** + * An interface representing metadata for a Remote Config account + * that performed the update. Contains the same fields as + * [`RemoteConfigUser`](/docs/reference/remote-config/rest/v1/Version#remoteconfiguser). + */ +export interface RemoteConfigUser { + /** Name of the Remote Config account that performed the update. */ + name?: string; + + /** Email address of the Remote Config account that performed the update. */ + email: string; + + /** Image URL of the Remote Config account that performed the update. 
*/ + imageUrl?: string; +} diff --git a/src/v1/providers/storage.ts b/src/v1/providers/storage.ts new file mode 100644 index 000000000..998760eb6 --- /dev/null +++ b/src/v1/providers/storage.ts @@ -0,0 +1,343 @@ +// The MIT License (MIT) +// +// Copyright (c) 2017 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { firebaseConfig } from "../../common/config"; +import { CloudFunction, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; + +/** @internal */ +export const provider = "google.storage"; +/** @internal */ +export const service = "storage.googleapis.com"; + +/** + * Registers a Cloud Function scoped to a specific storage bucket. + * + * @param bucket Name of the bucket to which this Cloud Function is + * scoped. + * + * @returns Storage bucket builder interface. + */ +export function bucket(bucket?: string) { + return _bucketWithOptions({}, bucket); +} + +/** + * Registers a Cloud Function scoped to the default storage bucket for the + * project. + * + * @returns Storage object builder interface. + */ +export function object() { + return _objectWithOptions({}); +} + +/** @internal */ +export function _bucketWithOptions(options: DeploymentOptions, bucket?: string): BucketBuilder { + const resourceGetter = () => { + bucket = bucket || firebaseConfig().storageBucket; + if (!bucket) { + throw new Error( + "Missing bucket name. If you are unit testing, please provide a bucket name" + + " through `functions.storage.bucket(bucketName)`, or set process.env.FIREBASE_CONFIG." + ); + } + if (!/^[a-z\d][a-z\d\\._-]{1,230}[a-z\d]$/.test(bucket)) { + throw new Error(`Invalid bucket name ${bucket}`); + } + return `projects/_/buckets/${bucket}`; + }; + return new BucketBuilder(resourceGetter, options); +} + +/** @internal */ +export function _objectWithOptions(options: DeploymentOptions): ObjectBuilder { + return _bucketWithOptions(options).object(); +} + +/** + * The Google Cloud Storage bucket builder interface. + * + * Access via `functions.storage.bucket()`. + */ +export class BucketBuilder { + /** @internal */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + /** + * Event handler which fires every time a Google Cloud Storage change occurs. + * + * @returns Storage object builder interface scoped to the specified storage + * bucket. 
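 *
 * @example
 * A sketch of the typical chain, assuming `functions` is the v1 entry point
 * and the default bucket from the Firebase config:
 *
 *   export const onUpload = functions.storage.object().onFinalize((object, context) => {
 *     console.log(
 *       `new object ${object.name} (${object.contentType ?? "unknown type"}) in ${object.bucket}`
 *     );
 *     return null;
 *   });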
+ */ + object() { + return new ObjectBuilder(this.triggerResource, this.options); + } +} + +/** + * The Google Cloud Storage object builder interface. + * + * Access via `functions.storage.object()`. + */ +export class ObjectBuilder { + /** @internal */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + /** + * Event handler sent only when a bucket has enabled object versioning. + * This event indicates that the live version of an object has become an + * archived version, either because it was archived or because it was + * overwritten by the upload of an object of the same name. + * + * @param handler Event handler which is run every time a Google Cloud Storage + * archival occurs. + * + * @returns A function which you can export and deploy. + */ + onArchive( + handler: (object: ObjectMetadata, context: EventContext) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "object.archive"); + } + + /** + * Event handler which fires every time a Google Cloud Storage deletion occurs. + * + * Sent when an object has been permanently deleted. This includes objects + * that are overwritten or are deleted as part of the bucket's lifecycle + * configuration. For buckets with object versioning enabled, this is not + * sent when an object is archived, even if archival occurs + * via the `storage.objects.delete` method. + * + * @param handler Event handler which is run every time a Google Cloud Storage + * deletion occurs. + * + * @returns A function which you can export and deploy. + */ + onDelete( + handler: (object: ObjectMetadata, context: EventContext) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "object.delete"); + } + + /** + * Event handler which fires every time a Google Cloud Storage object + * creation occurs. + * + * Sent when a new object (or a new generation of an existing object) + * is successfully created in the bucket. This includes copying or rewriting + * an existing object. A failed upload does not trigger this event. + * + * @param handler Event handler which is run every time a Google Cloud Storage + * object creation occurs. + * + * @returns A function which you can export and deploy. + */ + onFinalize( + handler: (object: ObjectMetadata, context: EventContext) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "object.finalize"); + } + + /** + * Event handler which fires every time the metadata of an existing object + * changes. + * + * @param handler Event handler which is run every time a Google Cloud Storage + * metadata update occurs. + * + * @returns A function which you can export and deploy. + */ + onMetadataUpdate( + handler: (object: ObjectMetadata, context: EventContext) => PromiseLike | any + ): CloudFunction { + return this.onOperation(handler, "object.metadataUpdate"); + } + + /** @hidden */ + private onOperation( + handler: (object: ObjectMetadata, context: EventContext) => PromiseLike | any, + eventType: string + ): CloudFunction { + return makeCloudFunction({ + handler, + provider, + service, + eventType, + triggerResource: this.triggerResource, + options: this.options, + }); + } +} + +/** Interface representing a Google Google Cloud Storage object metadata object. */ +export interface ObjectMetadata { + /** The kind of the object, which is always `storage#object`. */ + kind: string; + + /** + * The ID of the object, including the bucket name, object name, and + * generation number. 
+ */ + id: string; + + /** Storage bucket that contains the object. */ + bucket: string; + + /** Storage class of the object. */ + storageClass: string; + + /** + * The value of the `Content-Length` header, used to determine the length of + * the object data in bytes. + */ + size: string; + + /** The creation time of the object in RFC 3339 format. */ + timeCreated: string; + + /** + * The modification time of the object metadata in RFC 3339 format. + */ + updated: string; + + /** Link to access the object, assuming you have sufficient permissions. */ + selfLink?: string; + + /** The object's name. */ + name?: string; + + /** + * Generation version number that changes each time the object is + * overwritten. + */ + generation?: string; + + /** The object's content type, also known as the MIME type. */ + contentType?: string; + + /** + * Meta-generation version number that changes each time the object's metadata + * is updated. + */ + metageneration?: string; + + /** + * The deletion time of the object in RFC 3339 format. Returned + * only if this version of the object has been deleted. + */ + timeDeleted?: string; + + timeStorageClassUpdated?: string; + + /** + * MD5 hash for the object. All Google Cloud Storage objects + * have a CRC32C hash or MD5 hash. + */ + md5Hash?: string; + + /** Media download link. */ + mediaLink?: string; + + /** + * Content-Encoding to indicate that an object is compressed + * (for example, with gzip compression) while maintaining its Content-Type. + */ + contentEncoding?: string; + + /** + * The value of the `Content-Disposition` header, used to specify presentation + * information about the data being transmitted. + */ + contentDisposition?: string; + + /** ISO 639-1 language code of the content. */ + contentLanguage?: string; + + /** + * The value of the `Cache-Control` header, used to determine whether Internet + * caches are allowed to cache public data for an object. + */ + cacheControl?: string; + + /** User-provided metadata. */ + metadata?: { + [key: string]: string; + }; + + acl?: [ + { + kind?: string; + id?: string; + selfLink?: string; + bucket?: string; + object?: string; + generation?: string; + entity?: string; + role?: string; + email?: string; + entityId?: string; + domain?: string; + projectTeam?: { + projectNumber?: string; + team?: string; + }; + etag?: string; + } + ]; + + owner?: { + entity?: string; + entityId?: string; + }; + + /** + * The object's CRC32C hash. All Google Cloud Storage objects + * have a CRC32C hash or MD5 hash. + */ + crc32c?: string; + + /** + * Specifies the number of originally uploaded objects from which + * a composite object was created. + */ + componentCount?: string; + + etag?: string; + + /** + * Customer-supplied encryption key. + * + * This object contains the following properties: + * * `encryptionAlgorithm` (`string|undefined`): The encryption algorithm that + * was used. Always contains the value `AES256`. + * * `keySha256` (`string|undefined`): An RFC 4648 base64-encoded string of the + * SHA256 hash of your encryption key. You can use this SHA256 hash to + * uniquely identify the AES-256 encryption key required to decrypt the + * object, which you must store securely. 
+ */ + customerEncryption?: { + encryptionAlgorithm?: string; + keySha256?: string; + }; +} diff --git a/src/v1/providers/tasks.ts b/src/v1/providers/tasks.ts new file mode 100644 index 000000000..0be9176ab --- /dev/null +++ b/src/v1/providers/tasks.ts @@ -0,0 +1,175 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as express from "express"; + +import { convertIfPresent, convertInvoker, copyIfPresent } from "../../common/encoding"; +import { Request } from "../../common/providers/https"; +import { + onDispatchHandler, + RateLimits, + RetryConfig, + TaskContext, +} from "../../common/providers/tasks"; +import { + initV1Endpoint, + initTaskQueueTrigger, + ManifestEndpoint, + ManifestRequiredAPI, +} from "../../runtime/manifest"; +import { optionsToEndpoint, optionsToTrigger } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; + +export type { RetryConfig, RateLimits, TaskContext }; + +/** + * Options for configuring the task queue to listen to. + */ +export interface TaskQueueOptions { + /** How a task should be retried in the event of a non-2xx return. */ + retryConfig?: RetryConfig; + /** How congestion control should be applied to the function. */ + rateLimits?: RateLimits; + + /** + * Who can enqueue tasks for this function. + * If left unspecified, only service accounts which have + * `roles/cloudtasks.enqueuer` and `roles/cloudfunctions.invoker` + * will have permissions. + */ + invoker?: "private" | string | string[]; +} + +/** + * A handler for tasks. + */ +export interface TaskQueueFunction { + (req: Request, res: express.Response): Promise; + + /** @alpha */ + __trigger: unknown; + + /** @alpha */ + __endpoint: ManifestEndpoint; + + /** @alpha */ + __requiredAPIs?: ManifestRequiredAPI[]; + + /** + * The callback passed to the `TaskQueueFunction` constructor. + * @param data - The body enqueued into a task queue. + * @param context - The request context of the enqueued task + * @returns Any return value. Google Cloud Functions will await any promise + * before shutting down your function. Resolved return values + * are only used for unit testing purposes. + */ + run(data: any, context: TaskContext): void | Promise; +} + +/** + * Builder for creating a `TaskQueueFunction`. 
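 *
 * @example
 * A small sketch, assuming `functions` is the v1 entry point; the retry and
 * rate-limit numbers are placeholders, not recommendations:
 *
 *   export const processItem = functions.tasks
 *     .taskQueue({
 *       retryConfig: { maxAttempts: 5, minBackoffSeconds: 60 },
 *       rateLimits: { maxConcurrentDispatches: 6 },
 *     })
 *     .onDispatch(async (data) => {
 *       console.log("handling enqueued task with payload", data);
 *     });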
+ */ +export class TaskQueueBuilder { + /** @internal */ + constructor( + private readonly tqOpts?: TaskQueueOptions, + private readonly depOpts?: DeploymentOptions + ) {} + + /** + * Creates a handler for tasks sent to a Google Cloud Tasks queue. + * @param handler - A callback to handle task requests. + * @returns A function you can export and deploy. + */ + onDispatch( + handler: (data: any, context: TaskContext) => void | Promise + ): TaskQueueFunction { + // onEnqueueHandler sniffs the function length of the passed-in callback + // and the user could have only tried to listen to data. Wrap their handler + // in another handler to avoid accidentally triggering the v2 API + const fixedLen = (data: any, context: TaskContext) => handler(data, context); + const func: any = onDispatchHandler(fixedLen); + + func.__trigger = { + ...optionsToTrigger(this.depOpts || {}), + taskQueueTrigger: {}, + }; + copyIfPresent(func.__trigger.taskQueueTrigger, this.tqOpts, "retryConfig"); + copyIfPresent(func.__trigger.taskQueueTrigger, this.tqOpts, "rateLimits"); + convertIfPresent( + func.__trigger.taskQueueTrigger, + this.tqOpts, + "invoker", + "invoker", + convertInvoker + ); + + func.__endpoint = { + platform: "gcfv1", + ...initV1Endpoint(this.depOpts), + ...optionsToEndpoint(this.depOpts), + taskQueueTrigger: initTaskQueueTrigger(this.depOpts), + }; + copyIfPresent( + func.__endpoint.taskQueueTrigger.retryConfig, + this.tqOpts?.retryConfig || {}, + "maxAttempts", + "maxBackoffSeconds", + "maxDoublings", + "maxRetrySeconds", + "minBackoffSeconds" + ); + copyIfPresent( + func.__endpoint.taskQueueTrigger.rateLimits, + this.tqOpts?.rateLimits || {}, + "maxConcurrentDispatches", + "maxDispatchesPerSecond" + ); + convertIfPresent( + func.__endpoint.taskQueueTrigger, + this.tqOpts, + "invoker", + "invoker", + convertInvoker + ); + + func.__requiredAPIs = [ + { + api: "cloudtasks.googleapis.com", + reason: "Needed for task queue functions", + }, + ]; + + func.run = handler; + + return func; + } +} + +/** + * Declares a function that can handle tasks enqueued using the Firebase Admin SDK. + * @param options - Configuration for the Task Queue that feeds into this function. + * Omitting options will configure a Task Queue with default settings. + */ +export function taskQueue(options?: TaskQueueOptions): TaskQueueBuilder { + return new TaskQueueBuilder(options); +} diff --git a/src/v1/providers/testLab.ts b/src/v1/providers/testLab.ts new file mode 100644 index 000000000..ae9f9e584 --- /dev/null +++ b/src/v1/providers/testLab.ts @@ -0,0 +1,316 @@ +// The MIT License (MIT) +// +// Copyright (c) 2019 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import { CloudFunction, Event, EventContext, makeCloudFunction } from "../cloud-functions"; +import { DeploymentOptions } from "../function-configuration"; + +/** @internal */ +export const PROVIDER = "google.testing"; +/** @internal */ +export const SERVICE = "testing.googleapis.com"; +/** @internal */ +export const TEST_MATRIX_COMPLETE_EVENT_TYPE = "testMatrix.complete"; + +/** Handle events related to Test Lab test matrices. */ +export function testMatrix() { + return _testMatrixWithOpts({}); +} + +/** @internal */ +export function _testMatrixWithOpts(opts: DeploymentOptions) { + return new TestMatrixBuilder(() => { + if (!process.env.GCLOUD_PROJECT) { + throw new Error("process.env.GCLOUD_PROJECT is not set."); + } + return "projects/" + process.env.GCLOUD_PROJECT + "/testMatrices/{matrix}"; + }, opts); +} + +/** Builder used to create Cloud Functions for Test Lab test matrices events. */ +export class TestMatrixBuilder { + /** @internal */ + constructor(private triggerResource: () => string, private options: DeploymentOptions) {} + + /** Handle a TestMatrix that reached a final test state. */ + onComplete( + handler: (testMatrix: TestMatrix, context: EventContext) => PromiseLike | any + ): CloudFunction { + const dataConstructor = (raw: Event) => { + return new TestMatrix(raw.data); + }; + return makeCloudFunction({ + provider: PROVIDER, + eventType: TEST_MATRIX_COMPLETE_EVENT_TYPE, + triggerResource: this.triggerResource, + service: SERVICE, + dataConstructor, + handler, + options: this.options, + }); + } +} + +/** TestMatrix captures details about a test run. */ +export class TestMatrix { + /** Unique id set by the service. */ + testMatrixId: string; + + /** When this test matrix was initially created (ISO8601 timestamp). */ + createTime: string; + + /** Indicates the current progress of the test matrix */ + state: TestState; + + /** + * The overall outcome of the test matrix run. Only set when the test matrix + * state is FINISHED. + */ + outcomeSummary?: OutcomeSummary; + + /** For 'INVALID' matrices only, describes why the matrix is invalid. */ + invalidMatrixDetails?: InvalidMatrixDetails; + + /** Where the results for the matrix are located. */ + resultStorage: ResultStorage; + + /** Information about the client which invoked the test. */ + clientInfo: ClientInfo; + + /** @internal */ + constructor(data: any) { + this.testMatrixId = data.testMatrixId; + this.createTime = data.timestamp; + this.state = data.state; + this.outcomeSummary = data.outcomeSummary; + this.invalidMatrixDetails = data.invalidMatrixDetails; + this.resultStorage = new ResultStorage(data.resultStorage); + this.clientInfo = new ClientInfo(data.clientInfo); + } +} + +/** Information about the client which invoked the test. */ +export class ClientInfo { + /** Client name, e.g. 'gcloud'. */ + name: string; + + /** Map of detailed information about the client which invoked the test. 
*/ + details: { [key: string]: string }; + + /** @internal */ + constructor(data?: { name: string; clientInfoDetails?: Array<{ key: string; value?: string }> }) { + this.name = data?.name || ""; + this.details = {}; + for (const detail of data?.clientInfoDetails || []) { + this.details[detail.key] = detail.value || ""; + } + } +} + +/** Locations where the test results are stored. */ +export class ResultStorage { + /** A storage location within Google Cloud Storage (GCS) for the test artifacts. */ + gcsPath?: string; + + /** Id of the ToolResults History containing these results. */ + toolResultsHistoryId?: string; + + /** + * Id of the ToolResults execution that the detailed TestMatrix results are + * written to. + */ + toolResultsExecutionId?: string; + + /** URL to test results in Firebase Console. */ + resultsUrl?: string; + + /** @internal */ + constructor(data?: any) { + this.gcsPath = data?.googleCloudStorage?.gcsPath; + this.toolResultsHistoryId = data?.toolResultsHistory?.historyId; + this.toolResultsExecutionId = data?.toolResultsExecution?.executionId; + this.resultsUrl = data?.resultsUrl; + } +} + +/** + * The detailed reason that a Matrix was deemed INVALID. + * + * @remarks + * Possible values: + * + * - `DETAILS_UNAVAILABLE`: The matrix is INVALID, but there are no further details available. + * + * - `MALFORMED_APK`: The input app APK could not be parsed. + * + * - `MALFORMED_TEST_APK`: The input test APK could not be parsed. + * + * - `NO_MANIFEST`: The AndroidManifest.xml could not be found. + * + * - `NO_PACKAGE_NAME`: The APK manifest does not declare a package name. + * + * - `INVALID_PACKAGE_NAME`: The APK application ID is invalid. + * + * - `TEST_SAME_AS_APP`: The test package and app package are the same. + * + * - `NO_INSTRUMENTATION`: The test apk does not declare an instrumentation. + * + * - `NO_SIGNATURE`: The input app apk does not have a signature. + * + * - `INSTRUMENTATION_ORCHESTRATOR_INCOMPATIBLE`: The test runner class specified by + * user or in the test APK's manifest file is not compatible with Android Test Orchestrator. + * + * - `NO_TEST_RUNNER_CLASS`: The test APK does not contain the test runner class + * specified by user or in the manifest file. + * + * - `NO_LAUNCHER_ACTIVITY`: A main launcher activity could not be found. + * + * - `FORBIDDEN_PERMISSIONS`: The app declares one or more permissions that are + * not allowed. + * + * - `INVALID_ROBO_DIRECTIVES`: There is a conflict in the provided + * robo_directives. + * + * - `INVALID_RESOURCE_NAME`: There is at least one invalid resource name in the + * provided robo directives. + * + * - `INVALID_DIRECTIVE_ACTION`: Invalid definition of action in the robo + * directives, e.g. a click or ignore action includes an input text field. + * + * - `TEST_LOOP_INTENT_FILTER_NOT_FOUND`: There is no test loop intent filter, + * or the one that is given is not formatted correctly. + * + * - `SCENARIO_LABEL_NOT_DECLARED`: The request contains a scenario label that + * was not declared in the manifest. + * + * - `SCENARIO_LABEL_MALFORMED`: There was an error when parsing a label value. + * + * - `SCENARIO_NOT_DECLARED`: The request contains a scenario number that was + * not declared in the manifest. + * + * - `DEVICE_ADMIN_RECEIVER`: Device administrator applications are not allowed. + * + * - `MALFORMED_XC_TEST_ZIP`: The zipped XCTest was malformed. The zip did not + * contain a single .xctestrun file and the contents of the + * DerivedData/Build/Products directory.
+ * + * - `BUILT_FOR_IOS_SIMULATOR`: The zipped XCTest was built for the iOS + * simulator rather than for a physical device. + * + * - `NO_TESTS_IN_XC_TEST_ZIP`: The .xctestrun file did not specify any test + * targets. + * + * - `USE_DESTINATION_ARTIFACTS`: One or more of the test targets defined in the + * .xctestrun file specifies "UseDestinationArtifacts", which is disallowed. + * + * - `TEST_NOT_APP_HOSTED`: XC tests which run on physical devices must have + * "IsAppHostedTestBundle" == "true" in the xctestrun file. + * + * - `PLIST_CANNOT_BE_PARSED`: An Info.plist file in the XCTest zip could not be + * parsed. + * + * - `NO_CODE_APK`: APK contains no code. + * + * - `INVALID_INPUT_APK`: Either the provided input APK path was malformed, the + * APK file does not exist, or the user does not have permission to access the + * APK file. + * + * - `INVALID_APK_PREVIEW_SDK`: APK is built for a preview SDK which is + * unsupported. + */ +export type InvalidMatrixDetails = + | "DETAILS_UNAVAILABLE" + | "MALFORMED_APK" + | "MALFORMED_TEST_APK" + | "NO_MANIFEST" + | "NO_PACKAGE_NAME" + | "INVALID_PACKAGE_NAME" + | "TEST_SAME_AS_APP" + | "NO_INSTRUMENTATION" + | "NO_SIGNATURE" + | "INSTRUMENTATION_ORCHESTRATOR_INCOMPATIBLE" + | "NO_TEST_RUNNER_CLASS" + | "NO_LAUNCHER_ACTIVITY" + | "FORBIDDEN_PERMISSIONS" + | "INVALID_ROBO_DIRECTIVES" + | "INVALID_RESOURCE_NAME" + | "INVALID_DIRECTIVE_ACTION" + | "TEST_LOOP_INTENT_FILTER_NOT_FOUND" + | "SCENARIO_LABEL_NOT_DECLARED" + | "SCENARIO_LABEL_MALFORMED" + | "SCENARIO_NOT_DECLARED" + | "DEVICE_ADMIN_RECEIVER" + | "MALFORMED_XC_TEST_ZIP" + | "BUILT_FOR_IOS_SIMULATOR" + | "NO_TESTS_IN_XC_TEST_ZIP" + | "USE_DESTINATION_ARTIFACTS" + | "TEST_NOT_APP_HOSTED" + | "PLIST_CANNOT_BE_PARSED" + | "NO_CODE_APK" + | "INVALID_INPUT_APK" + | "INVALID_APK_PREVIEW_SDK"; + +/** + * The state (i.e. progress) of a TestMatrix. + * + * @remarks + * Possible values: + * + * - `VALIDATING`: The matrix is being validated. + * + * - `PENDING`: The matrix is waiting for resources to become available. + * + * - `FINISHED`: The matrix has terminated normally. This means that the matrix + * level processing completed normally, but individual executions may be in an + * ERROR state. + * + * - `ERROR`: The matrix has stopped because it encountered an infrastructure + * failure. + * + * - `INVALID`: The matrix was not run because the provided inputs are not + * valid. E.g. the input file is not of the expected type, or is + * malformed/corrupt. + */ +export type TestState = "VALIDATING" | "PENDING" | "FINISHED" | "ERROR" | "INVALID"; + +/** + * Outcome summary for a finished TestMatrix. + * + * @remarks + * Possible values: + * + * - `SUCCESS`: The test matrix run was successful, for instance: + * - All the test cases passed. + * - Robo did not detect a crash of the application under test. + * + * - `FAILURE`: The test run failed, for instance: + * - One or more test cases failed. + * - A test timed out. + * - The application under test crashed. + * + * - `INCONCLUSIVE`: Something unexpected happened. The run should still be + * considered unsuccessful but this is likely a transient problem and + * re-running the test might be successful. + * + * - `SKIPPED`: All tests were skipped, for instance: + * - All device configurations were incompatible. 
+ */ +export type OutcomeSummary = "SUCCESS" | "FAILURE" | "INCONCLUSIVE" | "SKIPPED"; diff --git a/src/v2/core.ts b/src/v2/core.ts new file mode 100644 index 000000000..26987637d --- /dev/null +++ b/src/v2/core.ts @@ -0,0 +1,120 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Core functionality of the Cloud Functions for Firebase 2nd gen SDK. + * @packageDocumentation + */ + +import { Change } from "../common/change"; +import { ManifestEndpoint } from "../runtime/manifest"; + +export { Change }; + +export type { ParamsOf } from "../common/params"; +export { onInit } from "../common/onInit"; + +/** @internal */ +export interface TriggerAnnotation { + platform?: string; + concurrency?: number; + minInstances?: number; + maxInstances?: number; + availableMemoryMb?: number; + eventTrigger?: { + eventType: string; + resource: string; + service: string; + }; + failurePolicy?: { retry: boolean }; + httpsTrigger?: { + invoker?: string[]; + }; + labels?: { [key: string]: string }; + regions?: string[]; + timeout?: string; + vpcConnector?: string; + vpcConnectorEgressSettings?: string; + serviceAccountEmail?: string; + ingressSettings?: string; + secrets?: string[]; + blockingTrigger?: { + eventType: string; + options?: Record; + }; + // TODO: schedule +} + +/** + * A `CloudEventBase` is the base of a cross-platform format for encoding a serverless event. + * For more information, see https://github.com/cloudevents/spec. + * @typeParam T - The type of the event data. + * @beta + */ +export interface CloudEvent { + /** Version of the CloudEvents spec for this event. */ + readonly specversion: "1.0"; + + /** A globally unique ID for this event. */ + id: string; + + /** The resource that published this event. */ + source: string; + + /** The resource, provided by source, that this event relates to. */ + subject?: string; + + /** The type of event that this represents. */ + type: string; + + /** When this event occurred. */ + time: string; + + /** Information about this specific event. */ + data: T; +} + +/** + * A handler for CloudEvents. + * @typeParam EventType - The kind of event this function handles. + * Always a subclass of CloudEvent<> + * @beta + */ +export interface CloudFunction> { + (raw: CloudEvent): any | Promise; + + /** @alpha */ + __trigger?: unknown; + /** @alpha */ + __endpoint: ManifestEndpoint; + + /** + * The callback passed to the `CloudFunction` constructor. 
+ * Use `run` to test a function. + * @param event - The parsed event to handle. + * @returns Any return value. Cloud Functions awaits any promise + * before shutting down your function. Resolved return values + * are only used for unit testing purposes. + * @beta + */ + run(event: EventType): any | Promise; +} diff --git a/src/v2/index.ts b/src/v2/index.ts new file mode 100644 index 000000000..a5139f1fc --- /dev/null +++ b/src/v2/index.ts @@ -0,0 +1,86 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * The 2nd gen API for Cloud Functions for Firebase. + * This SDK supports deep imports. For example, the namespace + * `pubsub` is available at `firebase-functions/v2` or is directly importable + * from `firebase-functions/v2/pubsub`. + * @packageDocumentation + */ + +import * as alerts from "./providers/alerts"; +import * as database from "./providers/database"; +import * as eventarc from "./providers/eventarc"; +import * as https from "./providers/https"; +import * as identity from "./providers/identity"; +import * as pubsub from "./providers/pubsub"; +import * as scheduler from "./providers/scheduler"; +import * as storage from "./providers/storage"; +import * as tasks from "./providers/tasks"; +import * as remoteConfig from "./providers/remoteConfig"; +import * as testLab from "./providers/testLab"; +import * as firestore from "./providers/firestore"; +import * as dataconnect from "./providers/dataconnect"; + +export { + alerts, + database, + storage, + https, + identity, + pubsub, + tasks, + eventarc, + scheduler, + remoteConfig, + testLab, + firestore, + dataconnect, +}; + +export { logger } from "../logger"; +export { setGlobalOptions } from "./options"; +export type { + GlobalOptions, + SupportedRegion, + MemoryOption, + VpcEgressSetting, + IngressSetting, + EventHandlerOptions, +} from "./options"; + +export { onInit } from "./core"; +export type { CloudFunction, CloudEvent, ParamsOf } from "./core"; +export { Change } from "../common/change"; +export { traceContext } from "../common/trace"; +// NOTE: Equivalent to `export * as params from "../params"` but api-extractor doesn't support that syntax. +import * as params from "../params"; +export { params }; + +// NOTE: Required to support the Functions Emulator which monkey patches `functions.config()` +// TODO(danielylee): Remove in next major release. +export { config } from "../v1/config"; + +// Required for v1 Emulator support. 
+import { setApp as setEmulatedAdminApp } from "../common/app"; +export const app = { setEmulatedAdminApp }; diff --git a/src/v2/options.ts b/src/v2/options.ts new file mode 100644 index 000000000..608db1fa4 --- /dev/null +++ b/src/v2/options.ts @@ -0,0 +1,418 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Options to configure cloud functions. + * @packageDocumentation + */ + +import { + convertIfPresent, + copyIfPresent, + durationFromSeconds, + serviceAccountFromShorthand, +} from "../common/encoding"; +import { RESET_VALUE, ResetValue } from "../common/options"; +import { ManifestEndpoint } from "../runtime/manifest"; +import { TriggerAnnotation } from "./core"; +import { declaredParams, Expression } from "../params"; +import { ParamSpec, SecretParam } from "../params/types"; +import { HttpsOptions } from "./providers/https"; +import * as logger from "../logger"; + +export { RESET_VALUE } from "../common/options"; + +/** + * List of all regions supported by Cloud Functions (2nd gen). + */ +export type SupportedRegion = + | "asia-east1" + | "asia-northeast1" + | "asia-northeast2" + | "europe-north1" + | "europe-west1" + | "europe-west4" + | "us-central1" + | "us-east1" + | "us-east4" + | "us-west1" + | "asia-east2" + | "asia-northeast3" + | "asia-southeast1" + | "asia-southeast2" + | "asia-south1" + | "australia-southeast1" + | "europe-central2" + | "europe-west2" + | "europe-west3" + | "europe-west6" + | "northamerica-northeast1" + | "southamerica-east1" + | "us-west2" + | "us-west3" + | "us-west4"; + +/** + * List of available memory options supported by Cloud Functions. + */ +export type MemoryOption = + | "128MiB" + | "256MiB" + | "512MiB" + | "1GiB" + | "2GiB" + | "4GiB" + | "8GiB" + | "16GiB" + | "32GiB"; + +const MemoryOptionToMB: Record = { + "128MiB": 128, + "256MiB": 256, + "512MiB": 512, + "1GiB": 1024, + "2GiB": 2048, + "4GiB": 4096, + "8GiB": 8192, + "16GiB": 16384, + "32GiB": 32768, +}; + +/** + * List of available options for `VpcConnectorEgressSettings`. + */ +export type VpcEgressSetting = "PRIVATE_RANGES_ONLY" | "ALL_TRAFFIC"; + +/** + * List of available options for `IngressSettings`. + */ +export type IngressSetting = "ALLOW_ALL" | "ALLOW_INTERNAL_ONLY" | "ALLOW_INTERNAL_AND_GCLB"; + +/** + * `GlobalOptions` are options that can be set across an entire project. + * These options are common to HTTPS and event handling functions. 
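+ *
+ * @example
+ * // A minimal sketch of setting project-wide defaults; the specific values are illustrative.
+ * import { setGlobalOptions } from "firebase-functions/v2";
+ *
+ * setGlobalOptions({ region: "us-central1", memory: "256MiB", maxInstances: 10 });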
+ */ +export interface GlobalOptions { + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a 2nd gen function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes). + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Minimum number of actual instances to be running at a given time. + * + * @remarks + * Instances are billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances that can be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can be applied only to functions running on Cloud Functions (2nd gen). + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Cloud Functions (1st gen). + * To revert to the CPU amounts used in gcloud or in Cloud Functions (1st gen), set this + * to the value "gcf_gen1". + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect a function to a specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: IngressSetting | ResetValue; + + /** + * Invoker to set access control on HTTPS functions. + */ + invoker?: "public" | "private" | string | string[]; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /** + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** + * Determines whether Firebase App Check is enforced. Defaults to false. + * + * @remarks + * When true, requests with invalid tokens autorespond with a 401 + * (Unauthorized) error. + * When false, requests with invalid tokens set `event.app` to `undefined`. + */ + enforceAppCheck?: boolean; + + /** + * Controls whether function configuration modified outside of function source is preserved. Defaults to false.
+ * + * @remarks + * When setting configuration available in an underlying platform that is not yet available in the Firebase SDK + * for Cloud Functions, we recommend setting `preserveExternalChanges` to `true`. Otherwise, when Google releases + * a new version of the SDK with support for the missing configuration, your function's manually configured setting + * may inadvertently be wiped out. + */ + preserveExternalChanges?: boolean; +} + +let globalOptions: GlobalOptions | undefined; + +/** + * Sets default options for all functions written using the 2nd gen SDK. + * @param options Options to set as default + */ +export function setGlobalOptions(options: GlobalOptions) { + if (globalOptions) { + logger.warn("Calling setGlobalOptions twice leads to undefined behavior"); + } + globalOptions = options; +} + +/** + * Get the currently set default options. + * Used only for trigger generation. + * @internal + */ +export function getGlobalOptions(): GlobalOptions { + return globalOptions || {}; +} + +/** + * Additional fields that can be set on any event-handling function. + */ +export interface EventHandlerOptions extends Omit { + /** Type of the event. */ + eventType?: string; + + /** + * Filters events based on exact matches on the CloudEvents attributes. + * + * Each key-value pair represents an attribute name and its required value for exact matching. + * Events must match all specified filters to trigger the function. + */ + eventFilters?: Record>; + + /** + * Filters events based on path pattern matching on the CloudEvents attributes. + * + * Similar to eventFilters, but supports wildcard patterns for flexible matching where `*` matches + * any single path segment, `**` matches zero or more path segments, and `{param}` captures a path segment + * as a parameter + */ + eventFilterPathPatterns?: Record>; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; + + /** Region of the EventArc trigger. */ + // region?: string | Expression | null; + region?: string | Expression | ResetValue; + + /** The service account that EventArc should use to invoke this function. Requires the P4SA to have ActAs permission on this service account. */ + serviceAccount?: string | Expression | ResetValue; + + /** The name of the channel where the function receives events. */ + channel?: string; +} + +/** + * Apply GlobalOptions to trigger definitions. + * @internal + */ +export function optionsToTriggerAnnotations( + opts: GlobalOptions | EventHandlerOptions | HttpsOptions +): TriggerAnnotation { + const annotation: TriggerAnnotation = {}; + copyIfPresent( + annotation, + opts, + "concurrency", + "minInstances", + "maxInstances", + "ingressSettings", + "labels", + "vpcConnector", + "vpcConnectorEgressSettings", + "secrets" + ); + convertIfPresent(annotation, opts, "availableMemoryMb", "memory", (mem: MemoryOption) => { + return MemoryOptionToMB[mem]; + }); + convertIfPresent(annotation, opts, "regions", "region", (region) => { + if (typeof region === "string") { + return [region]; + } + return region; + }); + convertIfPresent( + annotation, + opts, + "serviceAccountEmail", + "serviceAccount", + serviceAccountFromShorthand + ); + convertIfPresent(annotation, opts, "timeout", "timeoutSeconds", durationFromSeconds); + convertIfPresent( + annotation, + opts as any as EventHandlerOptions, + "failurePolicy", + "retry", + (retry: boolean) => { + return retry ? 
{ retry: true } : null; + } + ); + + return annotation; +} + +/** + * Apply GlobalOptions to endpoint manifest. + * @internal + */ +export function optionsToEndpoint( + opts: GlobalOptions | EventHandlerOptions | HttpsOptions +): ManifestEndpoint { + const endpoint: ManifestEndpoint = {}; + copyIfPresent( + endpoint, + opts, + "omit", + "concurrency", + "minInstances", + "maxInstances", + "ingressSettings", + "labels", + "timeoutSeconds", + "cpu" + ); + convertIfPresent(endpoint, opts, "serviceAccountEmail", "serviceAccount"); + if (opts.vpcConnector !== undefined) { + if (opts.vpcConnector === null || opts.vpcConnector instanceof ResetValue) { + endpoint.vpc = RESET_VALUE; + } else { + const vpc: ManifestEndpoint["vpc"] = { connector: opts.vpcConnector }; + convertIfPresent(vpc, opts, "egressSettings", "vpcConnectorEgressSettings"); + endpoint.vpc = vpc; + } + } + convertIfPresent( + endpoint, + opts, + "availableMemoryMb", + "memory", + ( + mem: MemoryOption | Expression | ResetValue | null + ): number | Expression | null | ResetValue => { + return typeof mem === "object" ? mem : MemoryOptionToMB[mem]; + } + ); + convertIfPresent(endpoint, opts, "region", "region", (region) => { + if (typeof region === "string") { + return [region]; + } + return region; + }); + convertIfPresent( + endpoint, + opts, + "secretEnvironmentVariables", + "secrets", + (secrets: (string | SecretParam)[]) => + secrets.map((secret) => ({ key: secret instanceof SecretParam ? secret.name : secret })) + ); + + return endpoint; +} + +/** + * @hidden + * @alpha + */ +export function __getSpec(): { + globalOptions: GlobalOptions; + params: ParamSpec[]; +} { + return { + globalOptions: getGlobalOptions(), + params: declaredParams.map((p) => p.toSpec()), + }; +} diff --git a/src/v2/providers/alerts/alerts.ts b/src/v2/providers/alerts/alerts.ts new file mode 100644 index 000000000..e3b51c549 --- /dev/null +++ b/src/v2/providers/alerts/alerts.ts @@ -0,0 +1,300 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { initV2Endpoint, ManifestEndpoint } from "../../../runtime/manifest"; +import { ResetValue } from "../../../common/options"; +import { CloudEvent, CloudFunction } from "../../core"; +import { Expression } from "../../../params"; +import { wrapTraceContext } from "../../trace"; +import * as options from "../../options"; +import { SecretParam } from "../../../params/types"; +import { withInit } from "../../../common/onInit"; + +/** + * The CloudEvent data emitted by Firebase Alerts. + * @typeParam T - the payload type that is expected for this alert. + */ +export interface FirebaseAlertData { + /** Time that the event was created. */ + createTime: string; + /** Time that the event has ended. Optional, only present for ongoing alerts. */ + endTime: string; + /** Payload of the event, which includes the details of the specific alert. */ + payload: T; +} + +/** + * A custom CloudEvent for Firebase Alerts (with custom extension attributes). + * @typeParam T - the data type for this alert that is wrapped in a `FirebaseAlertData` object. + */ +export interface AlertEvent extends CloudEvent> { + /** The type of the alerts that got triggered. */ + alertType: string; + /** + * The Firebase App ID that’s associated with the alert. This is optional, + * and only present when the alert is targeted at a specific Firebase App. + */ + appId?: string; + + /** Data for an `AlertEvent` is a `FirebaseAlertData` object with a given payload. */ + data: FirebaseAlertData; +} + +/** @internal */ +export const eventType = "google.firebase.firebasealerts.alerts.v1.published"; + +/** The underlying alert type of the Firebase Alerts provider. */ +export type AlertType = + | "crashlytics.newFatalIssue" + | "crashlytics.newNonfatalIssue" + | "crashlytics.regression" + | "crashlytics.stabilityDigest" + | "crashlytics.velocity" + | "crashlytics.newAnrIssue" + | "billing.planUpdate" + | "billing.planAutomatedUpdate" + | "appDistribution.newTesterIosDevice" + | "appDistribution.inAppFeedback" + | "performance.threshold" + | string; + +/** + * Configuration for Firebase Alert functions. + */ +export interface FirebaseAlertOptions extends options.EventHandlerOptions { + /** Scope the handler to trigger on an alert type. */ + alertType: AlertType; + + /** Scope the function to trigger on a specific application. */ + appId?: string; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + * A value of null restores the default of 256MB. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * A value of null restores the default of 60s. + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes). + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + * A value of null restores the default min instances.
+ */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + * A value of null restores the default max instances. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + * A value of null removes the VPC connector + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + * A value of null turns off VPC connector egress settings + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + * A value of null restores the default service account. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + * A value of null turns off ingress settings. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** + * Declares a function that can handle Firebase Alerts from CloudEvents. + * @typeParam T - the type of event.data.payload. + * @param alertType - the alert type or Firebase Alert function configuration. + * @param handler a function that can handle the Firebase Alert inside a CloudEvent. + * @returns A function that you can export and deploy. + */ +export function onAlertPublished( + alertType: AlertType, + handler: (event: AlertEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle Firebase Alerts from CloudEvents. + * @typeParam T - the type of event.data.payload. + * @param options - the alert type and other options for this cloud function. + * @param handler a function that can handle the Firebase Alert inside a CloudEvent. 
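+ *
+ * @example
+ * // A sketch of handling a Crashlytics regression alert. The export name and handler
+ * // body are illustrative, and this assumes the alerts entry point re-exports
+ * // `onAlertPublished`.
+ * import { onAlertPublished } from "firebase-functions/v2/alerts";
+ *
+ * export const onRegression = onAlertPublished(
+ *   { alertType: "crashlytics.regression" },
+ *   (event) => {
+ *     console.log("Alert payload:", event.data.payload);
+ *   }
+ * );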
+ */ +export function onAlertPublished( + options: FirebaseAlertOptions, + handler: (event: AlertEvent) => any | Promise +): CloudFunction>; + +export function onAlertPublished( + alertTypeOrOpts: AlertType | FirebaseAlertOptions, + handler: (event: AlertEvent) => any | Promise +): CloudFunction> { + const [opts, alertType, appId] = getOptsAndAlertTypeAndApp(alertTypeOrOpts); + + const func = (raw: CloudEvent) => { + return wrapTraceContext(withInit(handler))(convertAlertAndApp(raw) as AlertEvent); + }; + + func.run = handler; + func.__endpoint = getEndpointAnnotation(opts, alertType, appId); + + return func; +} + +/** + * Helper function for getting the endpoint annotation used in alert handling functions. + * @internal + */ +export function getEndpointAnnotation( + opts: options.EventHandlerOptions, + alertType: string, + appId?: string +): ManifestEndpoint { + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + const specificOpts = options.optionsToEndpoint(opts); + const endpoint: ManifestEndpoint = { + ...initV2Endpoint(options.getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType, + eventFilters: { + alerttype: alertType, + }, + retry: opts.retry ?? false, + }, + }; + if (appId) { + endpoint.eventTrigger.eventFilters.appid = appId; + } + return endpoint; +} + +/** + * Helper function to parse the function opts, alert type, and appId. + * @internal + */ +export function getOptsAndAlertTypeAndApp( + alertTypeOrOpts: AlertType | FirebaseAlertOptions +): [options.EventHandlerOptions, string, string | undefined] { + let opts: options.EventHandlerOptions; + let alertType: AlertType; + let appId: string | undefined; + if (typeof alertTypeOrOpts === "string") { + alertType = alertTypeOrOpts; + opts = {}; + } else { + alertType = alertTypeOrOpts.alertType; + appId = alertTypeOrOpts.appId; + opts = { ...alertTypeOrOpts }; + delete (opts as any).alertType; + delete (opts as any).appId; + } + return [opts, alertType, appId]; +} + +/** + * Helper function to convert the alert type and app ID in the CloudEvent to camel case. + * @internal + */ +export function convertAlertAndApp(raw: CloudEvent): CloudEvent { + const event = { ...raw }; + + if ("alerttype" in event) { + (event as any).alertType = (event as any).alerttype; + } + if ("appid" in event) { + (event as any).appId = (event as any).appid; + } + + return event; +} diff --git a/src/v2/providers/alerts/appDistribution.ts b/src/v2/providers/alerts/appDistribution.ts new file mode 100644 index 000000000..6aa54e733 --- /dev/null +++ b/src/v2/providers/alerts/appDistribution.ts @@ -0,0 +1,348 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software.
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle Firebase App Distribution events from Firebase Alerts. + * @packageDocumentation + */ + +import { ResetValue } from "../../../common/options"; +import { Expression } from "../../../params"; +import { CloudEvent, CloudFunction } from "../../core"; +import { wrapTraceContext } from "../../trace"; +import { convertAlertAndApp, FirebaseAlertData, getEndpointAnnotation } from "./alerts"; +import * as options from "../../options"; +import { SecretParam } from "../../../params/types"; +import { withInit } from "../../../common/onInit"; + +/** + * The internal payload object for adding a new tester device to app distribution. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface NewTesterDevicePayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.AppDistroNewTesterIosDevicePayload"; + /** Name of the tester */ + testerName: string; + /** Email of the tester */ + testerEmail: string; + /** The device model name */ + testerDeviceModelName: string; + /** The device ID */ + testerDeviceIdentifier: string; +} + +/** + * The internal payload object for receiving in-app feedback from a tester. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface InAppFeedbackPayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.AppDistroInAppFeedbackPayload"; + /** Resource name. Format: `projects/{project_number}/apps/{app_id}/releases/{release_id}/feedbackReports/{feedback_id}` */ + feedbackReport: string; + /** Deep link back to the Firebase console. */ + feedbackConsoleUri: string; + /** Name of the tester */ + testerName?: string; + /** Email of the tester */ + testerEmail: string; + /** + * Version consisting of `versionName` and `versionCode` for Android and + * `CFBundleShortVersionString` and `CFBundleVersion` for iOS. + */ + appVersion: string; + /** Text entered by the tester */ + text: string; + /** URI to download screenshot. This URI is fast expiring. */ + screenshotUri?: string; +} + +/** + * A custom CloudEvent for Firebase Alerts (with custom extension attributes). + * @typeParam T - the data type for app distribution alerts that is wrapped in a `FirebaseAlertData` object. + */ +export interface AppDistributionEvent extends CloudEvent> { + /** The type of the alerts that got triggered. */ + alertType: string; + /** The Firebase App ID that’s associated with the alert. */ + appId: string; +} + +/** @internal */ +export const newTesterIosDeviceAlert = "appDistribution.newTesterIosDevice"; +/** @internal */ +export const inAppFeedbackAlert = "appDistribution.inAppFeedback"; + +/** + * Configuration for app distribution functions. + */ +export interface AppDistributionOptions extends options.EventHandlerOptions { + /** Scope the function to trigger on a specific application. */ + appId?: string; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. 
+ */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** + * Declares a function that can handle adding a new tester iOS device. + * @param handler - Event handler which is run every time a new tester iOS device is added. + * @returns A function that you can export and deploy. + */ +export function onNewTesterIosDevicePublished( + handler: (event: AppDistributionEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle adding a new tester iOS device. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler which is run every time a new tester iOS device is added. + * @returns A function that you can export and deploy. 
+ */ +export function onNewTesterIosDevicePublished( + appId: string, + handler: (event: AppDistributionEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle adding a new tester iOS device. + * @param opts - Options that can be set on the function. + * @param handler - Event handler which is run every time a new tester iOS device is added. + * @returns A function that you can export and deploy. + */ +export function onNewTesterIosDevicePublished( + opts: AppDistributionOptions, + handler: (event: AppDistributionEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle adding a new tester iOS device. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler which is run every time a new tester iOS device is added. + * @returns A function that you can export and deploy. + */ +export function onNewTesterIosDevicePublished( + appIdOrOptsOrHandler: + | string + | AppDistributionOptions + | ((event: AppDistributionEvent) => any | Promise), + handler?: (event: AppDistributionEvent) => any | Promise +): CloudFunction> { + if (typeof appIdOrOptsOrHandler === "function") { + handler = appIdOrOptsOrHandler as ( + event: AppDistributionEvent + ) => any | Promise; + appIdOrOptsOrHandler = {}; + } + + const [opts, appId] = getOptsAndApp(appIdOrOptsOrHandler); + + const func = (raw: CloudEvent) => { + return wrapTraceContext(withInit(handler))( + convertAlertAndApp(raw) as AppDistributionEvent + ); + }; + + func.run = handler; + func.__endpoint = getEndpointAnnotation(opts, newTesterIosDeviceAlert, appId); + + return func; +} + +/** + * Declares a function that can handle receiving new in-app feedback from a tester. + * @param handler - Event handler which is run every time new feedback is received. + * @returns A function that you can export and deploy. + */ +export function onInAppFeedbackPublished( + handler: (event: AppDistributionEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle receiving new in-app feedback from a tester. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler which is run every time new feedback is received. + * @returns A function that you can export and deploy. + */ +export function onInAppFeedbackPublished( + appId: string, + handler: (event: AppDistributionEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle receiving new in-app feedback from a tester. + * @param opts - Options that can be set on the function. + * @param handler - Event handler which is run every time new feedback is received. + * @returns A function that you can export and deploy. + */ +export function onInAppFeedbackPublished( + opts: AppDistributionOptions, + handler: (event: AppDistributionEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle receiving new in-app feedback from a tester. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler which is run every time new feedback is received. + * @returns A function that you can export and deploy. 
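+ *
+ * @example
+ * // A sketch of logging tester feedback. The export name and handler body are
+ * // illustrative, and this assumes the alerts entry point exposes the
+ * // `appDistribution` namespace.
+ * import { appDistribution } from "firebase-functions/v2/alerts";
+ *
+ * export const logFeedback = appDistribution.onInAppFeedbackPublished((event) => {
+ *   console.log(event.data.payload.text);
+ * });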
+ */ +export function onInAppFeedbackPublished( + appIdOrOptsOrHandler: + | string + | AppDistributionOptions + | ((event: AppDistributionEvent) => any | Promise), + handler?: (event: AppDistributionEvent) => any | Promise +): CloudFunction> { + if (typeof appIdOrOptsOrHandler === "function") { + handler = appIdOrOptsOrHandler as ( + event: AppDistributionEvent + ) => any | Promise; + appIdOrOptsOrHandler = {}; + } + + const [opts, appId] = getOptsAndApp(appIdOrOptsOrHandler); + + const func = (raw: CloudEvent) => { + return wrapTraceContext(withInit(handler))( + convertAlertAndApp(raw) as AppDistributionEvent + ); + }; + + func.run = handler; + func.__endpoint = getEndpointAnnotation(opts, inAppFeedbackAlert, appId); + + return func; +} + +/** + * Helper function to parse the function opts and appId. + * @internal + */ +export function getOptsAndApp( + appIdOrOpts: string | AppDistributionOptions +): [options.EventHandlerOptions, string | undefined] { + let opts: options.EventHandlerOptions; + let appId: string | undefined; + if (typeof appIdOrOpts === "string") { + opts = {}; + appId = appIdOrOpts; + } else { + appId = appIdOrOpts.appId; + opts = { ...appIdOrOpts }; + delete (opts as any).appId; + } + return [opts, appId]; +} diff --git a/src/v2/providers/alerts/billing.ts b/src/v2/providers/alerts/billing.ts new file mode 100644 index 000000000..8bdb10d3d --- /dev/null +++ b/src/v2/providers/alerts/billing.ts @@ -0,0 +1,163 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle billing events from Firebase Alerts. + * @packageDocumentation + */ + +import { CloudEvent, CloudFunction } from "../../core"; +import { wrapTraceContext } from "../../trace"; +import { convertAlertAndApp, FirebaseAlertData, getEndpointAnnotation } from "./alerts"; +import * as options from "../../options"; +import { withInit } from "../../../common/onInit"; + +/** + * The internal payload object for billing plan updates. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface PlanUpdatePayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.BillingPlanUpdatePayload"; + /** A Firebase billing plan. */ + billingPlan: string; + /** The email address of the person that triggered billing plan change */ + principalEmail: string; + /** The type of the notification, e.g. 
upgrade, downgrade */ + notificationType: string; +} + +/** + * The internal payload object for billing plan automated updates. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface PlanAutomatedUpdatePayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.BillingPlanAutomatedUpdatePayload"; + /** A Firebase billing plan. */ + billingPlan: string; + /** The type of the notification, e.g. upgrade, downgrade */ + notificationType: string; +} + +/** + * A custom CloudEvent for billing Firebase Alerts (with custom extension attributes). + * @typeParam T - the data type for billing alerts that is wrapped in a `FirebaseAlertData` object. + */ +export interface BillingEvent extends CloudEvent> { + /** The type of the alerts that got triggered. */ + alertType: string; +} + +/** @internal */ +export const planUpdateAlert = "billing.planUpdate"; +/** @internal */ +export const planAutomatedUpdateAlert = "billing.planAutomatedUpdate"; + +/** + * Declares a function that can handle a billing plan update event. + * @param handler - Event handler which is run every time a billing plan is updated. + * @returns A function that you can export and deploy. + */ +export function onPlanUpdatePublished( + handler: (event: BillingEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a billing plan update event. + * @param opts - Options that can be set on the function. + * @param handler - Event handler which is run every time a billing plan is updated. + * @returns A function that you can export and deploy. + */ +export function onPlanUpdatePublished( + opts: options.EventHandlerOptions, + handler: (event: BillingEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a billing plan update event. + * @param optsOrHandler - Options or an event-handling function. + * @param handler - Event handler which is run every time a billing plan is updated. + * @returns A function that you can export and deploy. + */ +export function onPlanUpdatePublished( + optsOrHandler: + | options.EventHandlerOptions + | ((event: BillingEvent) => any | Promise), + handler?: (event: BillingEvent) => any | Promise +): CloudFunction> { + return onOperation(planUpdateAlert, optsOrHandler, handler); +} + +/** + * Declares a function that can handle an automated billing plan update event. + * @param handler - Event handler which is run every time an automated billing plan update occurs. + * @returns A function that you can export and deploy. + */ +export function onPlanAutomatedUpdatePublished( + handler: (event: BillingEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle an automated billing plan update event. + * @param opts - Options that can be set on the function. + * @param handler - Event handler which is run every time an automated billing plan update occurs. + * @returns A function that you can export and deploy. + */ +export function onPlanAutomatedUpdatePublished( + opts: options.EventHandlerOptions, + handler: (event: BillingEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle an automated billing plan update event. + * @param optsOrHandler - Options or an event-handling function. + * @param handler - Event handler which is run every time an automated billing plan update occurs. + * @returns A function that you can export and deploy. 
+ */ +export function onPlanAutomatedUpdatePublished( + optsOrHandler: + | options.EventHandlerOptions + | ((event: BillingEvent) => any | Promise), + handler?: (event: BillingEvent) => any | Promise +): CloudFunction> { + return onOperation(planAutomatedUpdateAlert, optsOrHandler, handler); +} + +/** @internal */ +export function onOperation( + alertType: string, + optsOrHandler: options.EventHandlerOptions | ((event: BillingEvent) => any | Promise), + handler: (event: BillingEvent) => any | Promise +): CloudFunction> { + if (typeof optsOrHandler === "function") { + handler = optsOrHandler as (event: BillingEvent) => any | Promise; + optsOrHandler = {}; + } + + const func = (raw: CloudEvent) => { + return wrapTraceContext(withInit(handler))(convertAlertAndApp(raw) as BillingEvent); + }; + + func.run = handler; + func.__endpoint = getEndpointAnnotation(optsOrHandler, alertType); + + return func; +} diff --git a/src/v2/providers/alerts/crashlytics.ts b/src/v2/providers/alerts/crashlytics.ts new file mode 100644 index 000000000..9fd2b26bb --- /dev/null +++ b/src/v2/providers/alerts/crashlytics.ts @@ -0,0 +1,612 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle Crashlytics events from Firebase Alerts. + * @packageDocumentation + */ + +import { ResetValue } from "../../../common/options"; +import { Expression } from "../../../params"; +import { CloudEvent, CloudFunction } from "../../core"; +import { wrapTraceContext } from "../../trace"; +import { convertAlertAndApp, FirebaseAlertData, getEndpointAnnotation } from "./alerts"; +import * as options from "../../options"; +import { SecretParam } from "../../../params/types"; +import { withInit } from "../../../common/onInit"; + +/** Generic Crashlytics issue interface */ +export interface Issue { + /** The ID of the Crashlytics issue */ + id: string; + /** The title of the Crashlytics issue */ + title: string; + /** The subtitle of the Crashlytics issue */ + subtitle: string; + /** The application version of the Crashlytics issue */ + appVersion: string; +} + +/** + * The internal payload object for a new fatal issue. + * Payload is wrapped inside a `FirebaseAlertData` object. 
+ */ +export interface NewFatalIssuePayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.CrashlyticsNewFatalIssuePayload"; + /** Basic information of the Crashlytics issue */ + issue: Issue; +} + +/** + * The internal payload object for a new non-fatal issue. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface NewNonfatalIssuePayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.CrashlyticsNewNonfatalIssuePayload"; + /** Basic information of the Crashlytics issue */ + issue: Issue; +} + +/** + * The internal payload object for a regression alert. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface RegressionAlertPayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.CrashlyticsRegressionAlertPayload"; + /** The type of the Crashlytics issue, e.g. new fatal, new nonfatal, ANR */ + type: string; + /** Basic information of the Crashlytics issue */ + issue: Issue; + /** + * The time that the Crashlytics issues was most recently resolved before it + * began to reoccur. + */ + resolveTime: string; +} + +/** Generic Crashlytics trending issue interface */ +export interface TrendingIssueDetails { + /** The type of the Crashlytics issue, e.g. new fatal, new nonfatal, ANR */ + type: string; + /** Basic information of the Crashlytics issue */ + issue: Issue; + /** The number of crashes that occurred with the issue */ + eventCount: number; + /** The number of distinct users that were affected by the issue */ + userCount: number; +} + +/** + * The internal payload object for a stability digest. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface StabilityDigestPayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.CrashlyticsStabilityDigestPayload"; + /** + * The date that the digest gets created. Issues in the digest should have the + * same date as the digest date + */ + digestDate: string; + /** A stability digest containing several trending Crashlytics issues */ + trendingIssues: TrendingIssueDetails[]; +} + +/** + * The internal payload object for a velocity alert. + * Payload is wrapped inside a `FirebaseAlertData` object. + */ +export interface VelocityAlertPayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.CrashlyticsVelocityAlertPayload"; + /** Basic information of the Crashlytics issue */ + issue: Issue; + /** The time that the Crashlytics issue gets created */ + createTime: string; + /** + * The number of user sessions for the given app version that had this + * specific crash issue in the time period used to trigger the velocity alert. + */ + crashCount: number; + /** + * The percentage of user sessions for the given app version that had this + * specific crash issue in the time period used to trigger the velocity alert. + */ + crashPercentage: number; + /** + * The first app version where this issue was seen, and not necessarily the + * version that has triggered the alert. + */ + firstVersion: string; +} + +/** + * The internal payload object for a new Application Not Responding issue. + * Payload is wrapped inside a `FirebaseAlertData` object. 
+ */ +export interface NewAnrIssuePayload { + ["@type"]: "type.googleapis.com/google.events.firebase.firebasealerts.v1.CrashlyticsNewAnrIssuePayload"; + /** Basic information of the Crashlytics issue */ + issue: Issue; +} + +/** + * A custom CloudEvent for Firebase Alerts (with custom extension attributes). + * @typeParam T - the data type for Crashlytics alerts that is wrapped in a `FirebaseAlertData` object. + */ +export interface CrashlyticsEvent extends CloudEvent> { + /** The type of the alerts that got triggered. */ + alertType: string; + /** The Firebase App ID that’s associated with the alert. */ + appId: string; +} + +/** @internal */ +export const newFatalIssueAlert = "crashlytics.newFatalIssue"; +/** @internal */ +export const newNonfatalIssueAlert = "crashlytics.newNonfatalIssue"; +/** @internal */ +export const regressionAlert = "crashlytics.regression"; +/** @internal */ +export const stabilityDigestAlert = "crashlytics.stabilityDigest"; +/** @internal */ +export const velocityAlert = "crashlytics.velocity"; +/** @internal */ +export const newAnrIssueAlert = "crashlytics.newAnrIssue"; + +/** + * Configuration for Crashlytics functions. + */ +export interface CrashlyticsOptions extends options.EventHandlerOptions { + /** Scope the function to trigger on a specific application. */ + appId?: string; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. 
+ */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** + * Declares a function that can handle a new fatal issue published to Crashlytics. + * @param handler - Event handler that is triggered when a new fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewFatalIssuePublished( + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new fatal issue published to Crashlytics. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler that is triggered when a new fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewFatalIssuePublished( + appId: string, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new fatal issue published to Crashlytics. + * @param opts - Options that can be set on the function. + * @param handler - Event handler that is triggered when a new fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewFatalIssuePublished( + opts: CrashlyticsOptions, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new fatal issue published to Crashlytics. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler that is triggered when a new fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewFatalIssuePublished( + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler?: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + return onOperation(newFatalIssueAlert, appIdOrOptsOrHandler, handler); +} + +/** + * Declares a function that can handle a new non-fatal issue published to Crashlytics. + * @param handler - Event handler that is triggered when a new fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewNonfatalIssuePublished( + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new non-fatal issue published to Crashlytics. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler that is triggered when a new non-fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. 
+ */ +export function onNewNonfatalIssuePublished( + appId: string, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new non-fatal issue published to Crashlytics. + * @param opts - Options that can be set on the function. + * @param handler - Event handler that is triggered when a new non-fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewNonfatalIssuePublished( + opts: CrashlyticsOptions, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new non-fatal issue published to Crashlytics. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler that is triggered when a new non-fatal issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewNonfatalIssuePublished( + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler?: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + return onOperation(newNonfatalIssueAlert, appIdOrOptsOrHandler, handler); +} + +/** + * Declares a function that can handle a regression alert published to Crashlytics. + * @param handler - Event handler that is triggered when a regression alert is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onRegressionAlertPublished( + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a regression alert published to Crashlytics. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler that is triggered when a regression alert is published to Crashlytics. + * @returns A function that you can export and deploy. + + */ +export function onRegressionAlertPublished( + appId: string, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a regression alert published to Crashlytics. + * @param opts - Options that can be set on the function. + * @param handler - Event handler that is triggered when a regression alert is published to Crashlytics. + * @returns A function that you can export and deploy. + + */ +export function onRegressionAlertPublished( + opts: CrashlyticsOptions, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a regression alert published to Crashlytics. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler that is triggered when a regression alert is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onRegressionAlertPublished( + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler?: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + return onOperation(regressionAlert, appIdOrOptsOrHandler, handler); +} + +/** + * Declares a function that can handle a stability digest published to Crashlytics. + * @param handler - Event handler that is triggered when a stability digest is published to Crashlytics. + * @returns A function that you can export and deploy. 
+ */ +export function onStabilityDigestPublished( + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a stability digest published to Crashlytics. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler that is triggered when a stability digest is published to Crashlytics. + * @returns A function that you can export and deploy. + + */ +export function onStabilityDigestPublished( + appId: string, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a stability digest published to Crashlytics. + * @param opts - Options that can be set on the function. + * @param handler - Event handler that is triggered when a stability digest is published to Crashlytics. + * @returns A function that you can export and deploy. + + */ +export function onStabilityDigestPublished( + opts: CrashlyticsOptions, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a stability digest published to Crashlytics. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler that is triggered when a stability digest is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onStabilityDigestPublished( + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler?: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + return onOperation(stabilityDigestAlert, appIdOrOptsOrHandler, handler); +} + +/** + * Declares a function that can handle a velocity alert published to Crashlytics. + * @param handler - Event handler that is triggered when a velocity alert is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onVelocityAlertPublished( + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a velocity alert published to Crashlytics. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler that is triggered when a velocity alert is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onVelocityAlertPublished( + appId: string, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a velocity alert published to Crashlytics. + * @param opts - Options that can be set on the function. + * @param handler - Event handler that is triggered when a velocity alert is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onVelocityAlertPublished( + opts: CrashlyticsOptions, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a velocity alert published to Crashlytics. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler that is triggered when a velocity alert is published to Crashlytics. + * @returns A function that you can export and deploy. 
+ */ +export function onVelocityAlertPublished( + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler?: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + return onOperation(velocityAlert, appIdOrOptsOrHandler, handler); +} + +/** + * Declares a function that can handle a new Application Not Responding issue published to Crashlytics. + * @param handler - Event handler that is triggered when a new Application Not Responding issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewAnrIssuePublished( + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new Application Not Responding issue published to Crashlytics. + * @param appId - A specific application the handler will trigger on. + * @param handler - Event handler that is triggered when a new Application Not Responding issue is published to Crashlytics. + * @returns A function that you can export and deploy. + + */ +export function onNewAnrIssuePublished( + appId: string, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new Application Not Responding issue published to Crashlytics. + * @param opts - Options that can be set on the function. + * @param handler - Event handler that is triggered when a new Application Not Responding issue is published to Crashlytics. + * @returns A function that you can export and deploy. + + */ +export function onNewAnrIssuePublished( + opts: CrashlyticsOptions, + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction>; + +/** + * Declares a function that can handle a new Application Not Responding issue published to Crashlytics. + * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function. + * @param handler - Event handler that is triggered when a new Application Not Responding issue is published to Crashlytics. + * @returns A function that you can export and deploy. + */ +export function onNewAnrIssuePublished( + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler?: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + return onOperation(newAnrIssueAlert, appIdOrOptsOrHandler, handler); +} + +/** @internal */ +export function onOperation( + alertType: string, + appIdOrOptsOrHandler: + | string + | CrashlyticsOptions + | ((event: CrashlyticsEvent) => any | Promise), + handler: (event: CrashlyticsEvent) => any | Promise +): CloudFunction> { + if (typeof appIdOrOptsOrHandler === "function") { + handler = appIdOrOptsOrHandler as (event: CrashlyticsEvent) => any | Promise; + appIdOrOptsOrHandler = {}; + } + + const [opts, appId] = getOptsAndApp(appIdOrOptsOrHandler); + + const func = (raw: CloudEvent) => { + return wrapTraceContext(withInit(handler))(convertAlertAndApp(raw) as CrashlyticsEvent); + }; + + func.run = handler; + func.__endpoint = getEndpointAnnotation(opts, alertType, appId); + + return func; +} + +/** + * Helper function to parse the function opts and appId. 
+ * @internal + */ +export function getOptsAndApp( + appIdOrOpts: string | CrashlyticsOptions +): [options.EventHandlerOptions, string | undefined] { + let opts: options.EventHandlerOptions; + let appId: string | undefined; + if (typeof appIdOrOpts === "string") { + opts = {}; + appId = appIdOrOpts; + } else { + appId = appIdOrOpts.appId; + opts = { ...appIdOrOpts }; + delete (opts as any).appId; + } + return [opts, appId]; +} diff --git a/src/v2/providers/alerts/index.ts b/src/v2/providers/alerts/index.ts new file mode 100644 index 000000000..b17f66dcb --- /dev/null +++ b/src/v2/providers/alerts/index.ts @@ -0,0 +1,36 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle events from Firebase Alerts. + * Subpackages give stronger typing to specific services which + * notify users via Firebase Alerts. + * @packageDocumentation + */ + +import * as appDistribution from "./appDistribution"; +import * as billing from "./billing"; +import * as crashlytics from "./crashlytics"; +import * as performance from "./performance"; + +export { appDistribution, billing, crashlytics, performance }; +export * from "./alerts"; diff --git a/src/v2/providers/alerts/performance.ts b/src/v2/providers/alerts/performance.ts new file mode 100644 index 000000000..9ee3f7beb --- /dev/null +++ b/src/v2/providers/alerts/performance.ts @@ -0,0 +1,183 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+/**
+ * Cloud functions to handle Firebase Performance Monitoring events from Firebase Alerts.
+ * @packageDocumentation
+ */
+
+import { withInit } from "../../../common/onInit";
+import { CloudEvent, CloudFunction } from "../../core";
+import { EventHandlerOptions } from "../../options";
+import { wrapTraceContext } from "../../trace";
+import { convertAlertAndApp, FirebaseAlertData, getEndpointAnnotation } from "./alerts";
+
+/**
+ * The internal payload object for a performance threshold alert.
+ * Payload is wrapped inside a {@link FirebaseAlertData} object.
+ */
+export interface ThresholdAlertPayload {
+  /** Name of the trace or network request this alert is for (e.g. my_custom_trace, firebase.com/api/123) */
+  eventName: string;
+  /** The resource type this alert is for (e.g. trace, network request, screen rendering) */
+  eventType: string;
+  /** The metric type this alert is for (e.g. success rate, response time, duration) */
+  metricType: string;
+  /** The number of events checked for this alert condition */
+  numSamples: number;
+  /** The threshold value of the alert condition without units (e.g. "75", "2.1") */
+  thresholdValue: number;
+  /** The unit for the alert threshold (e.g. "percent", "seconds") */
+  thresholdUnit: string;
+  /** The percentile of the alert condition; can be 0 (and is then omitted) if a percentile is not applicable to the alert condition; range: [1, 100] */
+  conditionPercentile?: number;
+  /** The app version this alert was triggered for; omitted if the alert is for a network request (because the alert was checked against data from all versions of the app) or a web app (where the app is versionless) */
+  appVersion?: string;
+  /** The value that violated the alert condition (e.g. "76.5", "3") */
+  violationValue: number;
+  /** The unit for the violation value (e.g. "percent", "seconds") */
+  violationUnit: string;
+  /** The link to the Firebase console for investigating this alert further */
+  investigateUri: string;
+}
+
+/**
+ * A custom CloudEvent for Firebase Alerts (with custom extension attributes).
+ * @typeParam T - the data type for performance alerts that is wrapped in a `FirebaseAlertData` object.
+ */
+export interface PerformanceEvent<T> extends CloudEvent<FirebaseAlertData<T>> {
+  /** The type of the alerts that got triggered. */
+  alertType: string;
+  /** The Firebase App ID that’s associated with the alert. */
+  appId: string;
+}
+
+/** @internal */
+export const thresholdAlert = "performance.threshold";
+
+/**
+ * Configuration for Firebase Performance Monitoring functions.
+ */
+export interface PerformanceOptions extends EventHandlerOptions {
+  /** Scope the function to trigger on a specific application. */
+  appId?: string;
+}
+
+/**
+ * Declares a function that can handle receiving performance threshold alerts.
+ * @param handler - Event handler which is run every time a threshold alert is received.
+ * @returns A function that you can export and deploy.
+ */
+export function onThresholdAlertPublished(
+  handler: (event: PerformanceEvent<ThresholdAlertPayload>) => any | Promise<any>
+): CloudFunction<PerformanceEvent<ThresholdAlertPayload>>;
+
+/**
+ * Declares a function that can handle receiving performance threshold alerts.
+ * @param appId - A specific application the handler will trigger on.
+ * @param handler - Event handler which is run every time a threshold alert is received.
+ * @returns A function that you can export and deploy.
+ */
+export function onThresholdAlertPublished(
+  appId: string,
+  handler: (event: PerformanceEvent<ThresholdAlertPayload>) => any | Promise<any>
+): CloudFunction<PerformanceEvent<ThresholdAlertPayload>>;
+
+/**
+ * Declares a function that can handle receiving performance threshold alerts.
+ * @param opts - Options that can be set on the function.
+ * @param handler - Event handler which is run every time a threshold alert is received.
+ * @returns A function that you can export and deploy.
+ */
+export function onThresholdAlertPublished(
+  opts: PerformanceOptions,
+  handler: (event: PerformanceEvent<ThresholdAlertPayload>) => any | Promise<any>
+): CloudFunction<PerformanceEvent<ThresholdAlertPayload>>;
+
+/**
+ * Declares a function that can handle receiving performance threshold alerts.
+ * @param appIdOrOptsOrHandler - A specific application, options, or an event-handling function.
+ * @param handler - Event handler which is run every time a threshold alert is received.
+ * @returns A function that you can export and deploy.
+ */
+export function onThresholdAlertPublished(
+  appIdOrOptsOrHandler:
+    | string
+    | PerformanceOptions
+    | ((event: PerformanceEvent<ThresholdAlertPayload>) => any | Promise<any>),
+  handler?: (event: PerformanceEvent<ThresholdAlertPayload>) => any | Promise<any>
+): CloudFunction<PerformanceEvent<ThresholdAlertPayload>> {
+  if (typeof appIdOrOptsOrHandler === "function") {
+    handler = appIdOrOptsOrHandler as (
+      event: PerformanceEvent<ThresholdAlertPayload>
+    ) => any | Promise<any>;
+    appIdOrOptsOrHandler = {};
+  }
+
+  const [opts, appId] = getOptsAndApp(appIdOrOptsOrHandler);
+
+  const func = (raw: CloudEvent<unknown>) => {
+    const event = convertAlertAndApp(raw) as PerformanceEvent<ThresholdAlertPayload>;
+    const convertedPayload = convertPayload(event.data.payload);
+    event.data.payload = convertedPayload;
+    return wrapTraceContext(withInit(handler))(event);
+  };
+
+  func.run = handler;
+  func.__endpoint = getEndpointAnnotation(opts, thresholdAlert, appId);
+
+  return func;
+}
+
+/**
+ * Helper function to parse the function opts and appId.
+ * @internal + */ +export function getOptsAndApp( + appIdOrOpts: string | PerformanceOptions +): [EventHandlerOptions, string | undefined] { + if (typeof appIdOrOpts === "string") { + return [{}, appIdOrOpts]; + } + + const opts: EventHandlerOptions = { ...appIdOrOpts }; + const appId: string | undefined = appIdOrOpts.appId; + delete (opts as any).appId; + + return [opts, appId]; +} + +/** + * Helper function to convert the raw payload of a {@link PerformanceEvent} to a {@link ThresholdAlertPayload} + * @internal + */ +export function convertPayload(raw: ThresholdAlertPayload): ThresholdAlertPayload { + const payload: ThresholdAlertPayload = { ...raw }; + if (typeof payload.conditionPercentile !== "undefined" && payload.conditionPercentile === 0) { + delete (payload as any).conditionPercentile; + } + if (typeof payload.appVersion !== "undefined" && payload.appVersion.length === 0) { + delete (payload as any).appVersion; + } + + return payload; +} diff --git a/src/v2/providers/database.ts b/src/v2/providers/database.ts new file mode 100644 index 000000000..c85f01172 --- /dev/null +++ b/src/v2/providers/database.ts @@ -0,0 +1,519 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
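A minimal usage sketch of the Realtime Database triggers this file defines, assuming the providers are exported from "firebase-functions/v2/database"; the ref path, instance name, and handler bodies below are illustrative placeholders, not part of the source.

import { onValueCreated, onValueWritten } from "firebase-functions/v2/database";

// Single-snapshot trigger: event.data is the created DataSnapshot and
// event.params holds the named capture groups from the ref pattern.
export const logNewMessage = onValueCreated("/messages/{pushId}", (event) => {
  console.log(`New message ${event.params.pushId}:`, event.data.val());
});

// Change trigger with options: event.data is a Change<DataSnapshot>
// exposing the before/after snapshots of the write.
export const auditMessageWrites = onValueWritten(
  { ref: "/messages/{pushId}", instance: "my-rtdb-instance", region: "us-central1" },
  (event) => {
    console.log("before:", event.data.before.val(), "after:", event.data.after.val());
  }
);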
+ +import { getApp } from "../../common/app"; +import { Change } from "../../common/change"; +import { ParamsOf } from "../../common/params"; +import { ResetValue } from "../../common/options"; +import { DataSnapshot } from "../../common/providers/database"; +import { normalizePath } from "../../common/utilities/path"; +import { PathPattern } from "../../common/utilities/path-pattern"; +import { applyChange } from "../../common/utilities/utils"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { CloudEvent, CloudFunction } from "../core"; +import { Expression } from "../../params"; +import { wrapTraceContext } from "../trace"; +import * as options from "../options"; +import { SecretParam } from "../../params/types"; +import { withInit } from "../../common/onInit"; + +export { DataSnapshot }; + +/** @internal */ +export const writtenEventType = "google.firebase.database.ref.v1.written"; + +/** @internal */ +export const createdEventType = "google.firebase.database.ref.v1.created"; + +/** @internal */ +export const updatedEventType = "google.firebase.database.ref.v1.updated"; + +/** @internal */ +export const deletedEventType = "google.firebase.database.ref.v1.deleted"; + +/** @hidden */ +export interface RawRTDBCloudEventData { + ["@type"]: "type.googleapis.com/google.events.firebase.database.v1.ReferenceEventData"; + data: any; + delta: any; +} + +/** @hidden */ +export interface RawRTDBCloudEvent extends CloudEvent { + firebasedatabasehost: string; + instance: string; + ref: string; + location: string; +} + +/** A CloudEvent that contains a DataSnapshot or a Change */ +export interface DatabaseEvent> extends CloudEvent { + /** The domain of the database instance */ + firebaseDatabaseHost: string; + /** The instance ID portion of the fully qualified resource name */ + instance: string; + /** The database reference path */ + ref: string; + /** The location of the database */ + location: string; + /** + * An object containing the values of the path patterns. + * Only named capture groups will be populated - {key}, {key=*}, {key=**} + */ + params: Params; +} + +/** ReferenceOptions extend EventHandlerOptions with provided ref and optional instance */ +export interface ReferenceOptions extends options.EventHandlerOptions { + /** + * Specify the handler to trigger on a database reference(s). + * This value can either be a single reference or a pattern. + * Examples: '/foo/bar', '/foo/{bar}' + */ + ref: Ref; + + /** + * Specify the handler to trigger on a database instance(s). + * If present, this value can either be a single instance or a pattern. + * Examples: 'my-instance-1', 'my-instance-*' + * Note: The capture syntax cannot be used for 'instance'. + */ + instance?: string; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). 
Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** + * Event handler which triggers when data is created, updated, or deleted in Realtime Database. + * + * @param reference - The database reference path to trigger on. + * @param handler - Event handler which is run every time a Realtime Database create, update, or delete occurs. + */ +export function onValueWritten( + ref: Ref, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>>; + +/** + * Event handler which triggers when data is created, updated, or deleted in Realtime Database. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Realtime Database create, update, or delete occurs. + */ +export function onValueWritten( + opts: ReferenceOptions, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>>; + +/** + * Event handler which triggers when data is created, updated, or deleted in Realtime Database. + * + * @param referenceOrOpts - Options or a string reference. + * @param handler - Event handler which is run every time a Realtime Database create, update, or delete occurs. 
+ */ +export function onValueWritten( + referenceOrOpts: Ref | ReferenceOptions, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>> { + return onChangedOperation(writtenEventType, referenceOrOpts, handler); +} + +/** + * Event handler which triggers when data is created in Realtime Database. + * + * @param reference - The database reference path to trigger on. + * @param handler - Event handler which is run every time a Realtime Database create occurs. + */ +export function onValueCreated( + ref: Ref, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>>; + +/** + * Event handler which triggers when data is created in Realtime Database. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Realtime Database create occurs. + */ +export function onValueCreated( + opts: ReferenceOptions, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>>; + +/** + * Event handler which triggers when data is created in Realtime Database. + * + * @param referenceOrOpts - Options or a string reference. + * @param handler - Event handler which is run every time a Realtime Database create occurs. + */ +export function onValueCreated( + referenceOrOpts: Ref | ReferenceOptions, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>> { + return onOperation(createdEventType, referenceOrOpts, handler); +} + +/** + * Event handler which triggers when data is updated in Realtime Database. + * + * @param reference - The database reference path to trigger on. + * @param handler - Event handler which is run every time a Realtime Database update occurs. + */ +export function onValueUpdated( + ref: Ref, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>>; + +/** + * Event handler which triggers when data is updated in Realtime Database. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Realtime Database update occurs. + */ +export function onValueUpdated( + opts: ReferenceOptions, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>>; + +/** + * Event handler which triggers when data is updated in Realtime Database. + * + * @param referenceOrOpts - Options or a string reference. + * @param handler - Event handler which is run every time a Realtime Database update occurs. + */ +export function onValueUpdated( + referenceOrOpts: Ref | ReferenceOptions, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>> { + return onChangedOperation(updatedEventType, referenceOrOpts, handler); +} + +/** + * Event handler which triggers when data is deleted in Realtime Database. + * + * @param reference - The database reference path to trigger on. + * @param handler - Event handler which is run every time a Realtime Database deletion occurs. + */ +export function onValueDeleted( + ref: Ref, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>>; + +/** + * Event handler which triggers when data is deleted in Realtime Database. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Realtime Database deletion occurs. 
+ */ +export function onValueDeleted( + opts: ReferenceOptions, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>>; + +/** + * Event handler which triggers when data is deleted in Realtime Database. + * + * @param referenceOrOpts - Options or a string reference. + * @param handler - Event handler which is run every time a Realtime Database deletion occurs. + */ +export function onValueDeleted( + referenceOrOpts: Ref | ReferenceOptions, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>> { + // TODO - need to use event.data.delta + return onOperation(deletedEventType, referenceOrOpts, handler); +} + +/** @internal */ +export function getOpts(referenceOrOpts: string | ReferenceOptions) { + let path: string; + let instance: string; + let opts: options.EventHandlerOptions; + if (typeof referenceOrOpts === "string") { + path = normalizePath(referenceOrOpts); + instance = "*"; + opts = {}; + } else { + path = normalizePath(referenceOrOpts.ref); + instance = referenceOrOpts.instance || "*"; + opts = { ...referenceOrOpts }; + delete (opts as any).ref; + delete (opts as any).instance; + } + + return { + path, + instance, + opts, + }; +} + +/** @internal */ +export function makeParams(event: RawRTDBCloudEvent, path: PathPattern, instance: PathPattern) { + return { + ...path.extractMatches(event.ref), + ...instance.extractMatches(event.instance), + }; +} + +/** @hidden */ +function makeDatabaseEvent( + event: RawRTDBCloudEvent, + data: any, + instance: string, + params: Params +): DatabaseEvent { + const snapshot = new DataSnapshot(data, event.ref, getApp(), instance); + const databaseEvent: DatabaseEvent = { + ...event, + firebaseDatabaseHost: event.firebasedatabasehost, + data: snapshot, + params, + }; + delete (databaseEvent as any).firebasedatabasehost; + return databaseEvent; +} + +/** @hidden */ +function makeChangedDatabaseEvent( + event: RawRTDBCloudEvent, + instance: string, + params: Params +): DatabaseEvent, Params> { + const before = new DataSnapshot(event.data.data, event.ref, getApp(), instance); + const after = new DataSnapshot( + applyChange(event.data.data, event.data.delta), + event.ref, + getApp(), + instance + ); + const databaseEvent: DatabaseEvent, Params> = { + ...event, + firebaseDatabaseHost: event.firebasedatabasehost, + data: { + before, + after, + }, + params, + }; + delete (databaseEvent as any).firebasedatabasehost; + return databaseEvent; +} + +/** @internal */ +export function makeEndpoint( + eventType: string, + opts: options.EventHandlerOptions, + path: PathPattern, + instance: PathPattern +): ManifestEndpoint { + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + const specificOpts = options.optionsToEndpoint(opts); + + const eventFilters: Record = {}; + const eventFilterPathPatterns: Record = { + // Note: Eventarc always treats ref as a path pattern + ref: path.getValue(), + }; + if (instance.hasWildcards()) { + eventFilterPathPatterns.instance = instance.getValue(); + } else { + eventFilters.instance = instance.getValue(); + } + + return { + ...initV2Endpoint(options.getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType, + eventFilters, + eventFilterPathPatterns, + retry: opts.retry ?? 
false, + }, + }; +} + +/** @internal */ +export function onChangedOperation( + eventType: string, + referenceOrOpts: Ref | ReferenceOptions, + handler: (event: DatabaseEvent, ParamsOf>) => any | Promise +): CloudFunction, ParamsOf>> { + const { path, instance, opts } = getOpts(referenceOrOpts); + + const pathPattern = new PathPattern(path); + const instancePattern = new PathPattern(instance); + + // wrap the handler + const func = (raw: CloudEvent) => { + const event = raw as RawRTDBCloudEvent; + const instanceUrl = getInstance(event); + const params = makeParams(event, pathPattern, instancePattern) as unknown as ParamsOf; + const databaseEvent = makeChangedDatabaseEvent(event, instanceUrl, params); + // Intentionally put init in the context of traces in case there is something + // expensive to observe. + return wrapTraceContext(withInit(handler))(databaseEvent); + }; + + func.run = handler; + + func.__endpoint = makeEndpoint(eventType, opts, pathPattern, instancePattern); + + return func; +} + +/** @internal */ +export function onOperation( + eventType: string, + referenceOrOpts: Ref | ReferenceOptions, + handler: (event: DatabaseEvent>) => any | Promise +): CloudFunction>> { + const { path, instance, opts } = getOpts(referenceOrOpts); + + const pathPattern = new PathPattern(path); + const instancePattern = new PathPattern(instance); + + // wrap the handler + const func = (raw: CloudEvent) => { + const event = raw as RawRTDBCloudEvent; + const instanceUrl = getInstance(event); + const params = makeParams(event, pathPattern, instancePattern) as unknown as ParamsOf; + const data = eventType === deletedEventType ? event.data.data : event.data.delta; + const databaseEvent = makeDatabaseEvent(event, data, instanceUrl, params); + return wrapTraceContext(withInit(handler))(databaseEvent); + }; + + func.run = handler; + + func.__endpoint = makeEndpoint(eventType, opts, pathPattern, instancePattern); + + return func; +} + +function getInstance(event: RawRTDBCloudEvent) { + const emuHost = process.env.FIREBASE_DATABASE_EMULATOR_HOST; + return emuHost + ? `http://${emuHost}/?ns=${event.instance}` + : `https://${event.instance}.${event.firebasedatabasehost}`; +} diff --git a/src/v2/providers/dataconnect.ts b/src/v2/providers/dataconnect.ts new file mode 100644 index 000000000..a972e655c --- /dev/null +++ b/src/v2/providers/dataconnect.ts @@ -0,0 +1,370 @@ +// The MIT License (MIT) +// +// Copyright (c) 2025 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
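A minimal usage sketch of the Data Connect trigger this file defines, assuming the provider is exposed under a path like "firebase-functions/v2/dataconnect"; the service, connector, and operation IDs and the handler body are hypothetical placeholders.

import { onMutationExecuted } from "firebase-functions/v2/dataconnect";

// String form: the path must match services/{service}/connectors/{connector}/operations/{operation}.
// An options object ({ service, connector, operation }) can be used instead of the string path.
export const onMessageInserted = onMutationExecuted(
  "services/my-service/connectors/my-connector/operations/InsertMessage",
  (event) => {
    // event.data.payload carries the mutation's data, variables, and errors;
    // event.authType/authId describe the principal that ran the mutation.
    console.log(`Mutation executed by ${event.authType}:`, event.data.payload.variables);
  }
);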
+ +import { CloudEvent, CloudFunction } from "../core"; +import { ParamsOf, VarName } from "../../common/params"; +import { + EventHandlerOptions, + getGlobalOptions, + optionsToEndpoint, + SupportedRegion, +} from "../options"; +import { normalizePath } from "../../common/utilities/path"; +import { wrapTraceContext } from "../trace"; +import { withInit } from "../../common/onInit"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { PathPattern } from "../../common/utilities/path-pattern"; +import { Expression } from "../../params"; +import { ResetValue } from "../../common/options"; + +/** @internal */ +export const mutationExecutedEventType = + "google.firebase.dataconnect.connector.v1.mutationExecuted"; + +/** @hidden */ +export interface SourceLocation { + line: number; + column: number; +} + +/** @hidden */ +export interface GraphqlErrorExtensions { + file: string; + code: string; + debugDetails: string; +} + +/** @hidden */ +export interface GraphqlError { + message: string; + locations: Array; + path: Array; + extensions: GraphqlErrorExtensions; +} + +/** @hidden */ +export interface RawMutation { + data: R; + variables: V; + errors: Array; +} + +/** @hidden */ +export interface MutationEventData { + ["@type"]: "type.googleapis.com/google.events.firebase.dataconnect.v1.MutationEventData"; + payload: RawMutation; +} + +/** @hidden */ +export interface RawDataConnectEvent extends CloudEvent { + project: string; + location: string; + service: string; + schema: string; + connector: string; + operation: string; + authtype: AuthType; + authid?: string; +} + +/** + * AuthType defines the possible values for the authType field in a Firebase Data Connect event. + * - app_user: an end user of an application.. + * - admin: an admin user of an application. In the context of impersonate endpoints used by the admin SDK, the impersonator. + * - unknown: a general type to capture all other principals not captured in the other auth types. + */ +export type AuthType = "app_user" | "admin" | "unknown"; + +/** OperationOptions extend EventHandlerOptions with a provided service, connector, and operation. */ +export interface OperationOptions< + Service extends string = string, + Connector extends string = string, + Operation extends string = string +> extends EventHandlerOptions { + /** Firebase Data Connect service ID */ + service?: Service; + /** Firebase Data Connect connector ID */ + connector?: Connector; + /** Name of the operation */ + operation?: Operation; + /** + * Region where functions should be deployed. Defaults to us-central1. + */ + region?: SupportedRegion | string | Expression | ResetValue; +} + +export type DataConnectParams = + PathPatternOrOptions extends string + ? ParamsOf + : PathPatternOrOptions extends OperationOptions< + infer Service extends string, + infer Connector extends string, + infer Operation extends string + > + ? Record | VarName | VarName, string> + : never; + +export interface DataConnectEvent> extends CloudEvent { + /** The location of the Firebase Data Connect instance */ + location: string; + /** The project identifier */ + project: string; + /** + * An object containing the values of the path patterns. + * Only named capture groups will be populated - {key}, {key=*}, {key=**}. 
+ */ + params: Params; + /** The type of principal that triggered the event */ + authType: AuthType; + /** The unique identifier for the principal */ + authId?: string; +} + +/** + * Event handler that triggers when a mutation is executed in Firebase Data Connect. + * + * @param mutation - The mutation path to trigger on. + * @param handler - Event handler which is run every time a mutation is executed. + */ +export function onMutationExecuted< + Mutation extends string, + Variables = unknown, + ResponseData = unknown +>( + mutation: Mutation, + handler: ( + event: DataConnectEvent, DataConnectParams> + ) => unknown | Promise +): CloudFunction< + DataConnectEvent, DataConnectParams> +>; + +/** + * Event handler that triggers when a mutation is executed in Firebase Data Connect. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a mutation is executed. + */ +export function onMutationExecuted< + Options extends OperationOptions, + Variables = unknown, + ResponseData = unknown +>( + opts: Options, + handler: ( + event: DataConnectEvent, DataConnectParams> + ) => unknown | Promise +): CloudFunction< + DataConnectEvent, DataConnectParams> +>; + +/** + * Event handler that triggers when a mutation is executed in Firebase Data Connect. + * + * @param mutationOrOpts - Options or string mutation path. + * @param handler - Event handler which is run every time a mutation is executed. + */ +export function onMutationExecuted< + PathPatternOrOptions extends string | OperationOptions, + Variables = unknown, + ResponseData = unknown +>( + mutationOrOpts: PathPatternOrOptions, + handler: ( + event: DataConnectEvent< + MutationEventData, + DataConnectParams + > + ) => unknown | Promise +): CloudFunction< + DataConnectEvent< + MutationEventData, + DataConnectParams + > +> { + return onOperation( + mutationExecutedEventType, + mutationOrOpts, + handler + ); +} + +function getOpts(mutationOrOpts: string | OperationOptions) { + const operationRegex = new RegExp("services/([^/]+)/connectors/([^/]*)/operations/([^/]+)"); + + let service: string | undefined; + let connector: string | undefined; + let operation: string | undefined; + let opts: EventHandlerOptions; + if (typeof mutationOrOpts === "string") { + const path = normalizePath(mutationOrOpts); + const match = path.match(operationRegex); + if (!match) { + throw new Error(`Invalid operation path: ${path}`); + } + + service = match[1]; + connector = match[2]; + operation = match[3]; + opts = {}; + } else { + service = mutationOrOpts.service; + connector = mutationOrOpts.connector; + operation = mutationOrOpts.operation; + opts = { ...mutationOrOpts }; + + delete (opts as any).service; + delete (opts as any).connector; + delete (opts as any).operation; + } + + return { + service, + connector, + operation, + opts, + }; +} + +function makeEndpoint( + eventType: string, + opts: EventHandlerOptions, + service: PathPattern | undefined, + connector: PathPattern | undefined, + operation: PathPattern | undefined +): ManifestEndpoint { + const baseOpts = optionsToEndpoint(getGlobalOptions()); + const specificOpts = optionsToEndpoint(opts); + + const eventFilters: Record = {}; + const eventFilterPathPatterns: Record = {}; + + if (service) { + if (service.hasWildcards()) { + eventFilterPathPatterns.service = service.getValue(); + } else { + eventFilters.service = service.getValue(); + } + } + if (connector) { + if (connector.hasWildcards()) { + 
eventFilterPathPatterns.connector = connector.getValue(); + } else { + eventFilters.connector = connector.getValue(); + } + } + if (operation) { + if (operation.hasWildcards()) { + eventFilterPathPatterns.operation = operation.getValue(); + } else { + eventFilters.operation = operation.getValue(); + } + } + return { + ...initV2Endpoint(getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType, + eventFilters, + eventFilterPathPatterns, + retry: opts.retry ?? false, + }, + }; +} + +function makeParams( + event: RawDataConnectEvent>, + service: PathPattern | undefined, + connector: PathPattern | undefined, + operation: PathPattern | undefined +) { + return { + ...service?.extractMatches(event.service), + ...connector?.extractMatches(event.connector), + ...operation?.extractMatches(event.operation), + }; +} + +function onOperation( + eventType: string, + mutationOrOpts: PathPatternOrOptions, + handler: ( + event: DataConnectEvent< + MutationEventData, + DataConnectParams + > + ) => any | Promise +): CloudFunction< + DataConnectEvent< + MutationEventData, + DataConnectParams + > +> { + const { service, connector, operation, opts } = getOpts(mutationOrOpts); + + const servicePattern = service ? new PathPattern(service) : undefined; + const connectorPattern = connector ? new PathPattern(connector) : undefined; + const operationPattern = operation ? new PathPattern(operation) : undefined; + + // wrap the handler + const func = (raw: CloudEvent) => { + const event = raw as RawDataConnectEvent>; + const params = makeParams( + event, + servicePattern, + connectorPattern, + operationPattern + ); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars -- service, connector, operation are destructured to exclude from ...rest + const { authtype, authid, service, connector, operation, ...rest } = event; + const dataConnectEvent: DataConnectEvent< + MutationEventData, + DataConnectParams + > = { + ...rest, + authType: authtype, + authId: authid, + params: params as DataConnectParams, + }; + + return wrapTraceContext(withInit(handler))(dataConnectEvent); + }; + + func.run = handler; + + func.__endpoint = makeEndpoint( + eventType, + opts, + servicePattern, + connectorPattern, + operationPattern + ); + + return func; +} diff --git a/src/v2/providers/eventarc.ts b/src/v2/providers/eventarc.ts new file mode 100644 index 000000000..48f5974be --- /dev/null +++ b/src/v2/providers/eventarc.ts @@ -0,0 +1,237 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to integrate directly with Eventarc. + * @packageDocumentation + */ + +import { convertIfPresent, copyIfPresent } from "../../common/encoding"; +import { ResetValue } from "../../common/options"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { CloudEvent, CloudFunction } from "../core"; +import { wrapTraceContext } from "../trace"; +import { Expression } from "../../params"; +import * as options from "../options"; +import { SecretParam } from "../../params/types"; +import { withInit } from "../../common/onInit"; + +/** Options that can be set on an Eventarc trigger. */ +export interface EventarcTriggerOptions extends options.EventHandlerOptions { + /** + * Type of the event to trigger on. + */ + eventType: string; + + /** + * ID of the channel. Can be either: + * * fully qualified channel resource name: + * `projects/{project}/locations/{location}/channels/{channel-id}` + * * partial resource name with location and channel ID, in which case + * the runtime project ID of the function will be used: + * `locations/{location}/channels/{channel-id}` + * * partial channel ID, in which case the runtime project ID of the + * function and `us-central1` as location will be used: + * `{channel-id}` + * + * If not specified, the default Firebase channel will be used: + * `projects/{project}/locations/us-central1/channels/firebase` + */ + channel?: string; + + /** + * Eventarc event exact match filter. + */ + filters?: Record; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. 
+ * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** Handles an Eventarc event published on the default channel. + * @param eventType - Type of the event to trigger on. + * @param handler - A function to execute when triggered. + * @returns A function that you can export and deploy. + */ +export function onCustomEventPublished( + eventType: string, + handler: (event: CloudEvent) => any | Promise +): CloudFunction>; + +/** Handles an Eventarc event. + * @param opts - Options to set on this function + * @param handler - A function to execute when triggered. + * @returns A function that you can export and deploy. + */ +export function onCustomEventPublished( + opts: EventarcTriggerOptions, + handler: (event: CloudEvent) => any | Promise +): CloudFunction>; + +export function onCustomEventPublished( + eventTypeOrOpts: string | EventarcTriggerOptions, + handler: (event: CloudEvent) => any | Promise +): CloudFunction> { + let opts: EventarcTriggerOptions; + if (typeof eventTypeOrOpts === "string") { + opts = { + eventType: eventTypeOrOpts, + }; + } else if (typeof eventTypeOrOpts === "object") { + opts = eventTypeOrOpts; + } + const func = (raw: CloudEvent) => { + return wrapTraceContext(withInit(handler))(raw as CloudEvent); + }; + + func.run = handler; + + const channel = opts.channel ?? "locations/us-central1/channels/firebase"; + + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + const specificOpts = options.optionsToEndpoint(opts); + + const endpoint: ManifestEndpoint = { + ...initV2Endpoint(options.getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType: opts.eventType, + eventFilters: {}, + retry: opts.retry ?? 
false, + channel, + }, + }; + convertIfPresent(endpoint.eventTrigger, opts, "eventFilters", "filters"); + copyIfPresent(endpoint.eventTrigger, opts, "retry"); + + func.__endpoint = endpoint; + + func.__requiredAPIs = [ + { + api: "eventarcpublishing.googleapis.com", + reason: "Needed for custom event functions", + }, + ]; + + return func; +} diff --git a/src/v2/providers/firestore.ts b/src/v2/providers/firestore.ts new file mode 100644 index 000000000..1adf1478d --- /dev/null +++ b/src/v2/providers/firestore.ts @@ -0,0 +1,749 @@ +// The MIT License (MIT) +// +// Copyright (c) 2023 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +import * as firestore from "firebase-admin/firestore"; +import * as logger from "../../logger"; +import { ParamsOf } from "../../common/params"; +import { normalizePath } from "../../common/utilities/path"; +import { PathPattern } from "../../common/utilities/path-pattern"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { Change, CloudEvent, CloudFunction } from "../core"; +import { EventHandlerOptions, getGlobalOptions, optionsToEndpoint } from "../options"; +import { + createBeforeSnapshotFromJson, + createBeforeSnapshotFromProtobuf, + createSnapshotFromJson, + createSnapshotFromProtobuf, +} from "../../common/providers/firestore"; +import { wrapTraceContext } from "../trace"; +import { withInit } from "../../common/onInit"; +import { Expression } from "../../params"; + +export { Change }; + +/** @internal */ +export const writtenEventType = "google.cloud.firestore.document.v1.written"; + +/** @internal */ +export const createdEventType = "google.cloud.firestore.document.v1.created"; + +/** @internal */ +export const updatedEventType = "google.cloud.firestore.document.v1.updated"; + +/** @internal */ +export const deletedEventType = "google.cloud.firestore.document.v1.deleted"; + +/** @internal */ +export const writtenEventWithAuthContextType = + "google.cloud.firestore.document.v1.written.withAuthContext"; + +/** @internal */ +export const createdEventWithAuthContextType = + "google.cloud.firestore.document.v1.created.withAuthContext"; + +/** @internal */ +export const updatedEventWithAuthContextType = + "google.cloud.firestore.document.v1.updated.withAuthContext"; + +/** @internal */ +export const deletedEventWithAuthContextType = + "google.cloud.firestore.document.v1.deleted.withAuthContext"; + +// 
https://github.com/googleapis/google-cloudevents-nodejs/blob/main/cloud/firestore/v1/DocumentEventData.ts +/** @internal */ +export interface RawFirestoreDocument { + name: string; + fields: Record; + createTime: string; + updateTime: string; +} + +/** @internal */ +export interface RawFirestoreData { + value?: RawFirestoreDocument; + oldValue?: RawFirestoreDocument; + updateMask?: { fieldPaths: Array }; +} + +/** @internal */ +export interface RawFirestoreEvent extends CloudEvent { + location: string; + project: string; + database: string; + namespace: string; + document: string; + datacontenttype?: string; + dataschema: string; +} + +/** @internal */ +export interface RawFirestoreAuthEvent extends RawFirestoreEvent { + authtype?: AuthType; + authid?: string; +} + +/** A Firestore DocumentSnapshot */ +export type DocumentSnapshot = firestore.DocumentSnapshot; + +/** A Firestore QueryDocumentSnapshot */ +export type QueryDocumentSnapshot = firestore.QueryDocumentSnapshot; + +/** + * AuthType defines the possible values for the authType field in a Firestore event with auth context. + * - service_account: a non-user principal used to identify a workload or machine user. + * - api_key: a non-user client API key. + * - system: an obscured identity used when Cloud Platform or another system triggered the event. Examples include a database record which was deleted based on a TTL. + * - unauthenticated: an unauthenticated action. + * - unknown: a general type to capture all other principals not captured in the other auth types. + */ +export type AuthType = "service_account" | "api_key" | "system" | "unauthenticated" | "unknown"; + +/** A CloudEvent that contains a DocumentSnapshot or a Change */ +export interface FirestoreEvent> extends CloudEvent { + /** The location of the Firestore instance */ + location: string; + /** The project identifier */ + project: string; + /** The Firestore database */ + database: string; + /** The Firestore namespace */ + namespace: string; + /** The document path */ + document: string; + /** + * An object containing the values of the path patterns. + * Only named capture groups will be populated - {key}, {key=*}, {key=**} + */ + params: Params; +} + +export interface FirestoreAuthEvent> + extends FirestoreEvent { + /** The type of principal that triggered the event */ + authType: AuthType; + /** The unique identifier for the principal */ + authId?: string; +} + +/** DocumentOptions extend EventHandlerOptions with provided document and optional database and namespace. */ +export interface DocumentOptions extends EventHandlerOptions { + /** The document path */ + document: Document | Expression; + /** The Firestore database */ + database?: string | Expression; + /** The Firestore namespace */ + namespace?: string | Expression; +} + +/** + * Event handler that triggers when a document is created, updated, or deleted in Firestore. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore create, update, or delete occurs. + */ +export function onDocumentWritten( + document: Document, + handler: ( + event: FirestoreEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is created, updated, or deleted in Firestore. + * + * @param opts - Options that can be set on an individual event-handling function. 
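+ *
+ * @example
+ * A minimal sketch (collection and field names are illustrative):
+ * ```typescript
+ * export const onuserwritten = onDocumentWritten({ document: "users/{userId}" }, (event) => {
+ *   console.log(`users/${event.params.userId} is now`, event.data?.after.data());
+ * });
+ * ```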
+ * @param handler - Event handler which is run every time a Firestore create, update, or delete occurs. + */ +export function onDocumentWritten( + opts: DocumentOptions, + handler: ( + event: FirestoreEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is created, updated, or deleted in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore create, update, or delete occurs. + */ +export function onDocumentWritten( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>> { + return onChangedOperation(writtenEventType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is created, updated, or deleted in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore create, update, or delete occurs. + */ +export function onDocumentWrittenWithAuthContext( + document: Document, + handler: ( + event: FirestoreAuthEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is created, updated, or deleted in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore create, update, or delete occurs. + */ +export function onDocumentWrittenWithAuthContext( + opts: DocumentOptions, + handler: ( + event: FirestoreAuthEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is created, updated, or deleted in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param opts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore create, update, or delete occurs. + */ +export function onDocumentWrittenWithAuthContext( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreAuthEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>> { + return onChangedOperation(writtenEventWithAuthContextType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is created in Firestore. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore create occurs. + */ +export function onDocumentCreated( + document: Document, + handler: ( + event: FirestoreEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is created in Firestore. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore create occurs. 
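+ *
+ * @example
+ * A minimal sketch (the collection name is illustrative):
+ * ```typescript
+ * export const onusercreated = onDocumentCreated({ document: "users/{userId}" }, (event) => {
+ *   console.log(`New document users/${event.params.userId}`, event.data?.data());
+ * });
+ * ```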
+ */ +export function onDocumentCreated( + opts: DocumentOptions, + handler: ( + event: FirestoreEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is created in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore create occurs. + */ +export function onDocumentCreated( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreEvent> + ) => any | Promise +): CloudFunction>> { + return onOperation(createdEventType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is created in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore create occurs. + */ +export function onDocumentCreatedWithAuthContext( + document: Document, + handler: ( + event: FirestoreAuthEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is created in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore create occurs. + */ +export function onDocumentCreatedWithAuthContext( + opts: DocumentOptions, + handler: ( + event: FirestoreAuthEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is created in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore create occurs. + */ +export function onDocumentCreatedWithAuthContext( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreAuthEvent> + ) => any | Promise +): CloudFunction>> { + return onOperation(createdEventWithAuthContextType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is updated in Firestore. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore update occurs. + */ +export function onDocumentUpdated( + document: Document, + handler: ( + event: FirestoreEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; +/** + * Event handler that triggers when a document is updated in Firestore. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore update occurs. + */ +export function onDocumentUpdated( + opts: DocumentOptions, + handler: ( + event: FirestoreEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is updated in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore update occurs. 
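+ *
+ * @example
+ * A minimal sketch comparing the before and after snapshots (field names are illustrative):
+ * ```typescript
+ * export const onscorechange = onDocumentUpdated("games/{gameId}", (event) => {
+ *   const before = event.data?.before.data();
+ *   const after = event.data?.after.data();
+ *   if (before?.score !== after?.score) {
+ *     console.log(`Score changed for games/${event.params.gameId}`);
+ *   }
+ * });
+ * ```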
+ */ +export function onDocumentUpdated( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>> { + return onChangedOperation(updatedEventType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is updated in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore update occurs. + */ +export function onDocumentUpdatedWithAuthContext( + document: Document, + handler: ( + event: FirestoreAuthEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is updated in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore update occurs. + */ +export function onDocumentUpdatedWithAuthContext( + opts: DocumentOptions, + handler: ( + event: FirestoreAuthEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction | undefined, ParamsOf>>; + +/** + * Event handler that triggers when a document is updated in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore update occurs. + */ +export function onDocumentUpdatedWithAuthContext( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreAuthEvent | undefined, ParamsOf> + ) => any | Promise +): CloudFunction< + FirestoreAuthEvent | undefined, ParamsOf> +> { + return onChangedOperation(updatedEventWithAuthContextType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is deleted in Firestore. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore delete occurs. + */ +export function onDocumentDeleted( + document: Document, + handler: ( + event: FirestoreEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is deleted in Firestore. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore delete occurs. + */ +export function onDocumentDeleted( + opts: DocumentOptions, + handler: ( + event: FirestoreEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is deleted in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore delete occurs. + */ +export function onDocumentDeleted( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreEvent> + ) => any | Promise +): CloudFunction>> { + return onOperation(deletedEventType, documentOrOpts, handler); +} + +/** + * Event handler that triggers when a document is deleted in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param document - The Firestore document path to trigger on. + * @param handler - Event handler which is run every time a Firestore delete occurs. 
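+ *
+ * @example
+ * A minimal sketch that logs the auth context (the collection name is illustrative):
+ * ```typescript
+ * export const onuserdeleted = onDocumentDeletedWithAuthContext("users/{userId}", (event) => {
+ *   console.log(`users/${event.params.userId} deleted by ${event.authType}`, event.authId);
+ * });
+ * ```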
+ */ +export function onDocumentDeletedWithAuthContext( + document: Document, + handler: ( + event: FirestoreAuthEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is deleted in Firestore. + * This trigger also provides the authentication context of the principal who triggered the event. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Firestore delete occurs. + */ +export function onDocumentDeletedWithAuthContext( + opts: DocumentOptions, + handler: ( + event: FirestoreAuthEvent> + ) => any | Promise +): CloudFunction>>; + +/** + * Event handler that triggers when a document is deleted in Firestore. + * + * @param documentOrOpts - Options or a string document path. + * @param handler - Event handler which is run every time a Firestore delete occurs. + */ +export function onDocumentDeletedWithAuthContext( + documentOrOpts: Document | DocumentOptions, + handler: ( + event: FirestoreAuthEvent> + ) => any | Promise +): CloudFunction>> { + return onOperation(deletedEventWithAuthContextType, documentOrOpts, handler); +} + +/** @internal */ +export function getOpts(documentOrOpts: string | DocumentOptions) { + let document: string | Expression; + let database: string | Expression; + let namespace: string | Expression; + let opts: EventHandlerOptions; + if (typeof documentOrOpts === "string") { + document = normalizePath(documentOrOpts); + database = "(default)"; + namespace = "(default)"; + opts = {}; + } else { + document = + typeof documentOrOpts.document === "string" + ? normalizePath(documentOrOpts.document) + : documentOrOpts.document; + database = documentOrOpts.database || "(default)"; + namespace = documentOrOpts.namespace || "(default)"; + opts = { ...documentOrOpts }; + delete (opts as any).document; + delete (opts as any).database; + delete (opts as any).namespace; + } + + return { + document, + database, + namespace, + opts, + }; +} + +/** @hidden */ +function getPath(event: RawFirestoreEvent): string { + return `projects/${event.project}/databases/${event.database}/documents/${event.document}`; +} + +/** @internal */ +export function createSnapshot(event: RawFirestoreEvent): QueryDocumentSnapshot { + if (event.datacontenttype?.includes("application/protobuf") || Buffer.isBuffer(event.data)) { + return createSnapshotFromProtobuf(event.data as Uint8Array, getPath(event), event.database); + } else if (event.datacontenttype?.includes("application/json")) { + return createSnapshotFromJson( + event.data, + event.source, + (event.data as RawFirestoreData).value?.createTime, + (event.data as RawFirestoreData).value?.updateTime, + event.database + ); + } else { + logger.error( + `Cannot determine payload type, datacontenttype is ${event.datacontenttype}, failing out.` + ); + throw Error("Error: Cannot parse event payload."); + } +} + +/** @internal */ +export function createBeforeSnapshot(event: RawFirestoreEvent): QueryDocumentSnapshot { + if (event.datacontenttype?.includes("application/protobuf") || Buffer.isBuffer(event.data)) { + return createBeforeSnapshotFromProtobuf( + event.data as Uint8Array, + getPath(event), + event.database + ); + } else if (event.datacontenttype?.includes("application/json")) { + return createBeforeSnapshotFromJson( + event.data, + event.source, + (event.data as RawFirestoreData).oldValue?.createTime, + (event.data as RawFirestoreData).oldValue?.updateTime, + event.database + ); + } else { + logger.error( + 
`Cannot determine payload type, datacontenttype is ${event.datacontenttype}, failing out.` + ); + throw Error("Error: Cannot parse event payload."); + } +} + +/** @internal */ +export function makeParams(document: string, documentPattern: PathPattern) { + return { + ...documentPattern.extractMatches(document), + }; +} + +/** @internal */ +export function makeFirestoreEvent( + eventType: string, + event: RawFirestoreEvent | RawFirestoreAuthEvent, + params: Params +): + | FirestoreEvent + | FirestoreAuthEvent { + const data = event.data + ? eventType === createdEventType || eventType === createdEventWithAuthContextType + ? createSnapshot(event) + : createBeforeSnapshot(event) + : undefined; + const firestoreEvent: FirestoreEvent = { + ...event, + params, + data, + }; + + delete (firestoreEvent as any).datacontenttype; + delete (firestoreEvent as any).dataschema; + + if ("authtype" in event) { + const eventWithAuth = { + ...firestoreEvent, + authType: event.authtype, + authId: event.authid, + }; + delete (eventWithAuth as any).authtype; + delete (eventWithAuth as any).authid; + return eventWithAuth; + } + + return firestoreEvent; +} + +/** @internal */ +export function makeChangedFirestoreEvent( + event: RawFirestoreEvent | RawFirestoreAuthEvent, + params: Params +): + | FirestoreEvent | undefined, Params> + | FirestoreAuthEvent | undefined, Params> { + const data = event.data + ? Change.fromObjects(createBeforeSnapshot(event), createSnapshot(event)) + : undefined; + const firestoreEvent: FirestoreEvent | undefined, Params> = { + ...event, + params, + data, + }; + delete (firestoreEvent as any).datacontenttype; + delete (firestoreEvent as any).dataschema; + + if ("authtype" in event) { + const eventWithAuth = { + ...firestoreEvent, + authType: event.authtype, + authId: event.authid, + }; + delete (eventWithAuth as any).authtype; + delete (eventWithAuth as any).authid; + return eventWithAuth; + } + + return firestoreEvent; +} + +/** @internal */ +export function makeEndpoint( + eventType: string, + opts: EventHandlerOptions, + document: string | Expression, + database: string | Expression, + namespace: string | Expression +): ManifestEndpoint { + const baseOpts = optionsToEndpoint(getGlobalOptions()); + const specificOpts = optionsToEndpoint(opts); + + const eventFilters: Record> = { + database, + namespace, + }; + const eventFilterPathPatterns: Record> = {}; + const maybePattern = + typeof document === "string" ? new PathPattern(document).hasWildcards() : true; + if (maybePattern) { + eventFilterPathPatterns.document = document; + } else { + eventFilters.document = document; + } + + return { + ...initV2Endpoint(getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType, + eventFilters, + eventFilterPathPatterns, + retry: opts.retry ?? false, + }, + }; +} + +/** @internal */ +export function onOperation< + Document extends string, + Event extends FirestoreEvent> +>( + eventType: string, + documentOrOpts: Document | DocumentOptions, + handler: (event: Event) => any | Promise +): CloudFunction { + const { document, database, namespace, opts } = getOpts(documentOrOpts); + + // wrap the handler + const func = (raw: CloudEvent) => { + const event = raw as RawFirestoreEvent | RawFirestoreAuthEvent; + const documentPattern = new PathPattern( + typeof document === "string" ? 
document : document.value() + ); + const params = makeParams(event.document, documentPattern) as unknown as ParamsOf; + const firestoreEvent = makeFirestoreEvent(eventType, event, params); + return wrapTraceContext(withInit(handler))(firestoreEvent); + }; + + func.run = handler; + + func.__endpoint = makeEndpoint(eventType, opts, document, database, namespace); + + return func; +} + +/** @internal */ +export function onChangedOperation< + Document extends string, + Event extends FirestoreEvent, ParamsOf> +>( + eventType: string, + documentOrOpts: Document | DocumentOptions, + handler: (event: Event) => any | Promise +): CloudFunction { + const { document, database, namespace, opts } = getOpts(documentOrOpts); + + // wrap the handler + const func = (raw: CloudEvent) => { + const event = raw as RawFirestoreEvent | RawFirestoreAuthEvent; + const documentPattern = new PathPattern( + typeof document === "string" ? document : document.value() + ); + const params = makeParams(event.document, documentPattern) as unknown as ParamsOf; + const firestoreEvent = makeChangedFirestoreEvent(event, params); + return wrapTraceContext(withInit(handler))(firestoreEvent); + }; + + func.run = handler; + + func.__endpoint = makeEndpoint(eventType, opts, document, database, namespace); + + return func; +} diff --git a/src/v2/providers/https.ts b/src/v2/providers/https.ts new file mode 100644 index 000000000..b5ebc5b05 --- /dev/null +++ b/src/v2/providers/https.ts @@ -0,0 +1,614 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle HTTPS request or callable RPCs. 
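+ *
+ * For example, minimal sketches of the main entry points defined below (handler bodies are
+ * illustrative):
+ *
+ * ```typescript
+ * export const api = onRequest({ cors: true }, (req, res) => {
+ *   res.json({ ok: true });
+ * });
+ *
+ * export const addnumbers = onCall<{ a: number; b: number }>((request) => {
+ *   return { sum: request.data.a + request.data.b };
+ * });
+ * ```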
+ * @packageDocumentation + */ + +import cors from "cors"; +import * as express from "express"; +import { convertIfPresent, convertInvoker, copyIfPresent } from "../../common/encoding"; +import { wrapTraceContext } from "../trace"; +import { isDebugFeatureEnabled } from "../../common/debug"; +import { ResetValue } from "../../common/options"; +import { + type CallableRequest, + type CallableResponse, + type FunctionsErrorCode, + HttpsError, + onCallHandler, + withErrorHandler, + type Request, + type AuthData, +} from "../../common/providers/https"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { GlobalOptions, SupportedRegion } from "../options"; +import { Expression } from "../../params"; +import { SecretParam } from "../../params/types"; +import * as options from "../options"; +import { withInit } from "../../common/onInit"; +import * as logger from "../../logger"; + +export type { Request, CallableRequest, CallableResponse, FunctionsErrorCode }; +export { HttpsError }; + +/** + * Options that can be set on an onRequest HTTPS function. + */ +export interface HttpsOptions extends Omit { + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** HTTP functions can override global options and can specify multiple regions to deploy to. */ + region?: + | SupportedRegion + | string + | Array + | Expression + | ResetValue; + + /** If true, allows CORS on requests to this function. + * If this is a `string` or `RegExp`, allows requests from domains that match the provided value. + * If this is an `Array`, allows requests from domains matching at least one entry of the array. + * Defaults to true for {@link https.CallableFunction} and false otherwise. + */ + cors?: + | string + | Expression + | Expression + | boolean + | RegExp + | Array; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. 
+   * This is different from the defaults when using the gcloud utility and is different from
+   * the fixed amount assigned in Google Cloud Functions generation 1.
+   * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this
+   * to the value "gcf_gen1"
+   */
+  cpu?: number | "gcf_gen1";
+
+  /**
+   * Connect cloud function to specified VPC connector.
+   */
+  vpcConnector?: string | Expression<string> | ResetValue;
+
+  /**
+   * Egress settings for VPC connector.
+   */
+  vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue;
+
+  /**
+   * Specific service account for the function to run as.
+   */
+  serviceAccount?: string | Expression<string> | ResetValue;
+
+  /**
+   * Ingress settings which control where this function can be called from.
+   */
+  ingressSettings?: options.IngressSetting | ResetValue;
+
+  /**
+   * User labels to set on the function.
+   */
+  labels?: Record<string, string>;
+
+  /*
+   * Secrets to bind to a function.
+   */
+  secrets?: (string | SecretParam)[];
+
+  /**
+   * Invoker to set access control on https functions.
+   */
+  invoker?: "public" | "private" | string | string[];
+}
+
+/**
+ * Options that can be set on a callable HTTPS function.
+ */
+export interface CallableOptions<T = any> extends HttpsOptions {
+  /**
+   * Determines whether Firebase App Check is enforced.
+   * When true, requests with invalid tokens are automatically rejected with a 401
+   * (Unauthorized) error.
+   * When false, requests with invalid tokens set event.app to undefined.
+   */
+  enforceAppCheck?: boolean;
+
+  /**
+   * Determines whether the Firebase App Check token is consumed on request. Defaults to false.
+   *
+   * @remarks
+   * Set this to true to enable the App Check replay protection feature by consuming the App Check token on callable
+   * request. Tokens that are found to be already consumed will have the request.app.alreadyConsumed property set to true.
+   *
+   * Tokens are only considered consumed if they are sent to the App Check service by setting this option to true.
+   * Other uses of the token do not consume it.
+   *
+   * This replay protection feature requires an additional network call to the App Check backend and forces the clients
+   * to obtain a fresh attestation from the chosen attestation providers. This can therefore negatively impact
+   * performance and can potentially deplete your attestation providers' quotas faster. Use this feature only for
+   * protecting low-volume, security-critical, or expensive operations.
+   *
+   * This option does not affect the enforceAppCheck option. Setting the latter to true will cause the callable function
+   * to automatically respond with a 401 Unauthorized status code when the request includes an invalid App Check token.
+   * When the request includes a valid but consumed App Check token, the request will not be automatically rejected. Instead,
+   * the request.app.alreadyConsumed property will be set to true and execution will be passed to the handler code, which can
+   * make further decisions, such as requiring additional security checks or rejecting the request.
+   */
+  consumeAppCheckToken?: boolean;
+
+  /**
+   * Time in seconds between sending heartbeat messages to keep the connection
+   * alive. Set to `null` to disable heartbeats.
+   *
+   * Defaults to 30 seconds.
+   */
+  heartbeatSeconds?: number | null;
+
+  /**
+   * (Deprecated) Callback for whether a request is authorized.
+   *
+   * Designed to allow reusable auth policies to be passed as an options object. Two built-in reusable policies exist:
+   * isSignedIn and hasClaim.
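+   *
+   * @example
+   * A minimal sketch using one of the built-in policies (the claim name and handler body are
+   * illustrative):
+   * ```typescript
+   * export const adminonly = onCall({ authPolicy: hasClaim("admin") }, (request) => {
+   *   return { ok: true };
+   * });
+   * ```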
+ * + * @deprecated + */ + authPolicy?: (auth: AuthData | null, data: T) => boolean | Promise; +} + +/** + * @deprecated + * + * An auth policy that requires a user to be signed in. + */ +export const isSignedIn = + () => + (auth: AuthData | null): boolean => + !!auth; + +/** + * @deprecated + * + * An auth policy that requires a user to be both signed in and have a specific claim (optionally with a specific value) + */ +export const hasClaim = + (claim: string, value?: string) => + (auth: AuthData | null): boolean => { + if (!auth) { + return false; + } + if (!(claim in auth.token)) { + return false; + } + return !value || auth.token[claim] === value; + }; + +/** + * Handles HTTPS requests. + */ +export type HttpsFunction = (( + /** An Express request object representing the HTTPS call to the function. */ + req: Request, + /** An Express response object, for this function to respond to callers. */ + res: express.Response +) => void | Promise) & { + /** @alpha */ + __trigger?: unknown; + /** @alpha */ + __endpoint: ManifestEndpoint; +}; + +/** + * Creates a callable method for clients to call using a Firebase SDK. + */ +export interface CallableFunction extends HttpsFunction { + /** Executes the handler function with the provided data as input. Used for unit testing. + * @param data - An input for the handler function. + * @returns The output of the handler function. + */ + run(request: CallableRequest): Return; + + stream( + request: CallableRequest, + response: CallableResponse + ): { stream: AsyncIterable; output: Return }; +} + +/** + * Handles HTTPS requests. + * @param opts - Options to set on this function + * @param handler - A function that takes a {@link https.Request} and response object, same signature as an Express app. + * @returns A function that you can export and deploy. + */ +export function onRequest( + opts: HttpsOptions, + handler: (request: Request, response: express.Response) => void | Promise +): HttpsFunction; +/** + * Handles HTTPS requests. + * @param handler - A function that takes a {@link https.Request} and response object, same signature as an Express app. + * @returns A function that you can export and deploy. + */ +export function onRequest( + handler: (request: Request, response: express.Response) => void | Promise +): HttpsFunction; +export function onRequest( + optsOrHandler: + | HttpsOptions + | ((request: Request, response: express.Response) => void | Promise), + handler?: (request: Request, response: express.Response) => void | Promise +): HttpsFunction { + let opts: HttpsOptions; + if (arguments.length === 1) { + opts = {}; + handler = optsOrHandler as ( + request: Request, + response: express.Response + ) => void | Promise; + } else { + opts = optsOrHandler as HttpsOptions; + } + + handler = withErrorHandler(handler); + + if (isDebugFeatureEnabled("enableCors") || "cors" in opts) { + let origin = opts.cors instanceof Expression ? opts.cors.value() : opts.cors; + if (isDebugFeatureEnabled("enableCors")) { + // Respect `cors: false` to turn off cors even if debug feature is enabled. + origin = opts.cors === false ? false : true; + } + // Arrays cause the access-control-allow-origin header to be dynamic based + // on the origin header of the request. If there is only one element in the + // array, this is unnecessary. 
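+    // For example (illustrative), `cors: ["https://app.example.com"]` collapses to a single
+    // string origin here, so the response header can stay static for every request.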
+ if (Array.isArray(origin) && origin.length === 1) { + origin = origin[0]; + } + const middleware = cors({ origin }); + + const userProvidedHandler = handler; + handler = (req: Request, res: express.Response): void | Promise => { + return new Promise((resolve) => { + res.on("finish", resolve); + middleware(req, res, () => { + resolve(userProvidedHandler(req, res)); + }); + }); + }; + } + + handler = wrapTraceContext(withInit(handler)); + + Object.defineProperty(handler, "__trigger", { + get: () => { + const baseOpts = options.optionsToTriggerAnnotations(options.getGlobalOptions()); + // global options calls region a scalar and https allows it to be an array, + // but optionsToTriggerAnnotations handles both cases. + const specificOpts = options.optionsToTriggerAnnotations(opts as options.GlobalOptions); + const trigger: any = { + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + httpsTrigger: { + allowInsecure: false, + }, + }; + convertIfPresent( + trigger.httpsTrigger, + options.getGlobalOptions(), + "invoker", + "invoker", + convertInvoker + ); + convertIfPresent(trigger.httpsTrigger, opts, "invoker", "invoker", convertInvoker); + return trigger; + }, + }); + + const globalOpts = options.getGlobalOptions(); + const baseOpts = options.optionsToEndpoint(globalOpts); + // global options calls region a scalar and https allows it to be an array, + // but optionsToTriggerAnnotations handles both cases. + const specificOpts = options.optionsToEndpoint(opts as options.GlobalOptions); + const endpoint: Partial = { + ...initV2Endpoint(globalOpts, opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + httpsTrigger: {}, + }; + convertIfPresent(endpoint.httpsTrigger, globalOpts, "invoker", "invoker", convertInvoker); + convertIfPresent(endpoint.httpsTrigger, opts, "invoker", "invoker", convertInvoker); + (handler as HttpsFunction).__endpoint = endpoint; + + return handler as HttpsFunction; +} + +/** + * Declares a callable method for clients to call using a Firebase SDK. + * @param opts - Options to set on this function. + * @param handler - A function that takes a {@link https.CallableRequest}. + * @returns A function that you can export and deploy. + */ +export function onCall, Stream = unknown>( + opts: CallableOptions, + handler: (request: CallableRequest, response?: CallableResponse) => Return +): CallableFunction ? Return : Promise, Stream>; + +/** + * Declares a callable method for clients to call using a Firebase SDK. + * @param handler - A function that takes a {@link https.CallableRequest}. + * @returns A function that you can export and deploy. + */ +export function onCall, Stream = unknown>( + handler: (request: CallableRequest, response?: CallableResponse) => Return +): CallableFunction ? Return : Promise>; +export function onCall, Stream = unknown>( + optsOrHandler: CallableOptions | ((request: CallableRequest) => Return), + handler?: (request: CallableRequest, response?: CallableResponse) => Return +): CallableFunction ? 
Return : Promise> { + let opts: CallableOptions; + if (arguments.length === 1) { + opts = {}; + handler = optsOrHandler as (request: CallableRequest) => Return; + } else { + opts = optsOrHandler as CallableOptions; + } + + let cors: string | boolean | RegExp | Array | undefined; + if ("cors" in opts) { + if (opts.cors instanceof Expression) { + cors = opts.cors.value(); + } else { + cors = opts.cors; + } + } else { + cors = true; + } + + let origin = isDebugFeatureEnabled("enableCors") ? true : cors; + // Arrays cause the access-control-allow-origin header to be dynamic based + // on the origin header of the request. If there is only one element in the + // array, this is unnecessary. + if (Array.isArray(origin) && origin.length === 1) { + origin = origin[0]; + } + + // fix the length of handler to make the call to handler consistent + const fixedLen = (req: CallableRequest, resp?: CallableResponse) => handler(req, resp); + let func: any = onCallHandler( + { + cors: { origin, methods: "POST" }, + enforceAppCheck: opts.enforceAppCheck ?? options.getGlobalOptions().enforceAppCheck, + consumeAppCheckToken: opts.consumeAppCheckToken, + heartbeatSeconds: opts.heartbeatSeconds, + authPolicy: opts.authPolicy, + }, + fixedLen, + "gcfv2" + ); + + func = wrapTraceContext(withInit(func)); + + Object.defineProperty(func, "__trigger", { + get: () => { + const baseOpts = options.optionsToTriggerAnnotations(options.getGlobalOptions()); + // global options calls region a scalar and https allows it to be an array, + // but optionsToTriggerAnnotations handles both cases. + const specificOpts = options.optionsToTriggerAnnotations(opts); + return { + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + "deployment-callable": "true", + }, + httpsTrigger: { + allowInsecure: false, + }, + }; + }, + }); + + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + // global options calls region a scalar and https allows it to be an array, + // but optionsToEndpoint handles both cases. + const specificOpts = options.optionsToEndpoint(opts); + func.__endpoint = { + ...initV2Endpoint(options.getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + callableTrigger: {}, + }; + + // TODO: in the next major version, do auth/appcheck in these helper methods too. + func.run = withInit(handler); + func.stream = () => { + return { + stream: { + next(): Promise> { + return Promise.reject("Coming soon"); + }, + }, + output: Promise.reject("Coming soon"), + }; + }; + return func; +} + +// To avoid taking a strict dependency on Genkit we will redefine the limited portion of the interface we depend upon. +// A unit test (dev dependency) notifies us of breaking changes. +interface ZodType { + __output: T; +} + +interface GenkitRunOptions { + context?: any; +} + +type GenkitAction< + I extends ZodType = ZodType, + O extends ZodType = ZodType, + S extends ZodType = ZodType +> = { + // NOTE: The return type from run includes trace data that we may one day like to use. + run(input: I["__output"], options: GenkitRunOptions): Promise<{ result: O["__output"] }>; + stream( + input: I["__output"], + options: GenkitRunOptions + ): { stream: AsyncIterable; output: Promise }; + + __action: { + name: string; + }; +}; + +type ActionInput = F extends GenkitAction + ? 
I["__output"] + : never; +type ActionOutput = F extends GenkitAction< + any, + infer O extends ZodType, + any +> + ? O["__output"] + : never; +type ActionStream = F extends GenkitAction< + any, + any, + infer S extends ZodType +> + ? S["__output"] + : never; + +export function onCallGenkit( + action: A +): CallableFunction, Promise>, ActionStream>; +export function onCallGenkit( + opts: CallableOptions>, + flow: A +): CallableFunction, Promise>, ActionStream>; +export function onCallGenkit( + optsOrAction: A | CallableOptions>, + action?: A +): CallableFunction, Promise>, ActionStream> { + let opts: CallableOptions>; + if (arguments.length === 2) { + opts = optsOrAction as CallableOptions>; + } else { + opts = {}; + action = optsOrAction as A; + } + if (!opts.secrets?.length) { + logger.debug( + `Genkit function for ${action.__action.name} is not bound to any secret. This may mean that you are not storing API keys as a secret or that you are not binding your secret to this function. See https://firebase.google.com/docs/functions/config-env?gen=2nd#secret_parameters for more information.` + ); + } + const cloudFunction = onCall, Promise>, ActionStream>( + opts, + async (req, res) => { + const context: Omit = {}; + copyIfPresent(context, req, "auth", "app", "instanceIdToken"); + + if (!req.acceptsStreaming) { + const { result } = await action.run(req.data, { context }); + return result; + } + + const { stream, output } = action.stream(req.data, { context }); + for await (const chunk of stream) { + await res.sendChunk(chunk); + } + return output; + } + ); + + cloudFunction.__endpoint.callableTrigger.genkitAction = action.__action.name; + return cloudFunction; +} diff --git a/src/v2/providers/identity.ts b/src/v2/providers/identity.ts new file mode 100644 index 000000000..755cbf93f --- /dev/null +++ b/src/v2/providers/identity.ts @@ -0,0 +1,373 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle events from Google Cloud Identity Platform. 
+ * @packageDocumentation + */ +import { ResetValue } from "../../common/options"; +import { + AuthBlockingEvent, + AuthBlockingEventType, + AuthUserRecord, + BeforeCreateResponse, + BeforeSignInResponse, + BeforeEmailResponse, + BeforeSmsResponse, + HandlerV2, + HttpsError, + wrapHandler, + MaybeAsync, +} from "../../common/providers/identity"; +import { BlockingFunction } from "../../v1/cloud-functions"; +import { wrapTraceContext } from "../trace"; +import { Expression } from "../../params"; +import { initV2Endpoint } from "../../runtime/manifest"; +import * as options from "../options"; +import { SecretParam } from "../../params/types"; +import { withInit } from "../../common/onInit"; + +export { HttpsError }; +export type { AuthUserRecord, AuthBlockingEvent }; + +/** @hidden Internally used when parsing the options. */ +interface InternalOptions { + opts: options.GlobalOptions; + idToken: boolean; + accessToken: boolean; + refreshToken: boolean; +} + +/** + * All function options plus idToken, accessToken, and refreshToken. + */ +export interface BlockingOptions { + /** Pass the ID Token credential to the function. */ + idToken?: boolean; + + /** Pass the Access Token credential to the function. */ + accessToken?: boolean; + + /** Pass the Refresh Token credential to the function. */ + refreshToken?: boolean; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. 
+ * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; +} + +/** + * Handles an event that is triggered before a user is created. + * @param handler - Event handler which is run every time before a user is created. + */ +export function beforeUserCreated( + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before a user is created. + * @param opts - Object containing function options. + * @param handler - Event handler which is run every time before a user is created. + */ +export function beforeUserCreated( + opts: BlockingOptions, + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before a user is created. + * @param optsOrHandler - Either an object containing function options, or an event handler (run before user creation). + * @param handler? - If defined, an event handler which is run every time before a user is created. + */ +export function beforeUserCreated( + optsOrHandler: + | BlockingOptions + | ((event: AuthBlockingEvent) => MaybeAsync), + handler?: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction { + return beforeOperation("beforeCreate", optsOrHandler, handler); +} + +/** + * Handles an event that is triggered before a user is signed in. + * @param handler - Event handler which is run every time before a user is signed in. + */ +export function beforeUserSignedIn( + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before a user is signed in. + * @param opts - Object containing function options. + * @param handler - Event handler which is run every time before a user is signed in. + */ +export function beforeUserSignedIn( + opts: BlockingOptions, + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before a user is signed in. + * @param optsOrHandler - Either an object containing function options, or an event handler (run before user signin). + * @param handler - Event handler which is run every time before a user is signed in. + */ +export function beforeUserSignedIn( + optsOrHandler: + | BlockingOptions + | ((event: AuthBlockingEvent) => MaybeAsync), + handler?: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction { + return beforeOperation("beforeSignIn", optsOrHandler, handler); +} + +/** + * Handles an event that is triggered before an email is sent to a user. + * @param handler - Event handler that is run before an email is sent to a user. + */ +export function beforeEmailSent( + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before an email is sent to a user. 
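+ *
+ * A minimal sketch (the blocked-domain list and the region value are illustrative assumptions):
+ *
+ * ```typescript
+ * import { beforeEmailSent, HttpsError } from "firebase-functions/v2/identity";
+ *
+ * const blockedDomains = ["example.org"];
+ *
+ * export const screenOutgoingEmail = beforeEmailSent({ region: "us-central1" }, (event) => {
+ *   // Throwing an HttpsError here blocks the email from being sent.
+ *   const email = event.data?.email ?? "";
+ *   if (blockedDomains.some((domain) => email.endsWith(domain))) {
+ *     throw new HttpsError("permission-denied", "Emails to this domain are blocked.");
+ *   }
+ * });
+ * ```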
+ * @param opts - Object containing function options. + * @param handler - Event handler that is run before an email is sent to a user. + */ +export function beforeEmailSent( + opts: Omit, + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before an email is sent to a user. + * @param optsOrHandler- Either an object containing function options, or an event handler that is run before an email is sent to a user. + * @param handler - Event handler that is run before an email is sent to a user. + */ +export function beforeEmailSent( + optsOrHandler: + | Omit + | ((event: AuthBlockingEvent) => MaybeAsync), + handler?: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction { + return beforeOperation("beforeSendEmail", optsOrHandler, handler); +} +/** + * Handles an event that is triggered before an SMS is sent to a user. + * @param handler - Event handler that is run before an SMS is sent to a user. + */ +export function beforeSmsSent( + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before an SMS is sent to a user. + * @param opts - Object containing function options. + * @param handler - Event handler that is run before an SMS is sent to a user. + */ +export function beforeSmsSent( + opts: Omit, + handler: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction; + +/** + * Handles an event that is triggered before an SMS is sent to a user. + * @param optsOrHandler - Either an object containing function options, or an event handler that is run before an SMS is sent to a user. + * @param handler - Event handler that is run before an SMS is sent to a user. + */ +export function beforeSmsSent( + optsOrHandler: + | Omit + | ((event: AuthBlockingEvent) => MaybeAsync), + handler?: (event: AuthBlockingEvent) => MaybeAsync +): BlockingFunction { + return beforeOperation("beforeSendSms", optsOrHandler, handler); +} + +/** @hidden */ +export function beforeOperation( + eventType: AuthBlockingEventType, + optsOrHandler: + | BlockingOptions + | (( + event: AuthBlockingEvent + ) => MaybeAsync< + BeforeCreateResponse | BeforeSignInResponse | BeforeEmailResponse | BeforeSmsResponse | void + >), + handler: HandlerV2 +): BlockingFunction { + if (!handler || typeof optsOrHandler === "function") { + handler = optsOrHandler as ( + event: AuthBlockingEvent + ) => MaybeAsync< + BeforeCreateResponse | BeforeSignInResponse | BeforeEmailResponse | BeforeSmsResponse | void + >; + optsOrHandler = {}; + } + + const { opts, ...blockingOptions } = getOpts(optsOrHandler); + + // Create our own function that just calls the provided function so we know for sure that + // handler takes one argument. This is something common/providers/identity depends on. 
+ const annotatedHandler = Object.assign(handler, { platform: "gcfv2" as const }); + const func: any = wrapTraceContext(withInit(wrapHandler(eventType, annotatedHandler))); + + const legacyEventType = `providers/cloud.auth/eventTypes/user.${eventType}`; + + /** Endpoint */ + const baseOptsEndpoint = options.optionsToEndpoint(options.getGlobalOptions()); + const specificOptsEndpoint = options.optionsToEndpoint(opts); + func.__endpoint = { + ...initV2Endpoint(options.getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOptsEndpoint, + ...specificOptsEndpoint, + labels: { + ...baseOptsEndpoint?.labels, + ...specificOptsEndpoint?.labels, + }, + blockingTrigger: { + eventType: legacyEventType, + options: { + ...((eventType === "beforeCreate" || eventType === "beforeSignIn") && blockingOptions), + }, + }, + }; + + func.__requiredAPIs = [ + { + api: "identitytoolkit.googleapis.com", + reason: "Needed for auth blocking functions", + }, + ]; + + func.run = handler; + + return func; +} + +/** @hidden */ +export function getOpts(blockingOptions: BlockingOptions): InternalOptions { + const accessToken = blockingOptions.accessToken || false; + const idToken = blockingOptions.idToken || false; + const refreshToken = blockingOptions.refreshToken || false; + const opts = { ...blockingOptions }; + delete (opts as any).accessToken; + delete (opts as any).idToken; + delete (opts as any).refreshToken; + return { + opts, + accessToken, + idToken, + refreshToken, + }; +} diff --git a/src/v2/providers/pubsub.ts b/src/v2/providers/pubsub.ts new file mode 100644 index 000000000..5ae982185 --- /dev/null +++ b/src/v2/providers/pubsub.ts @@ -0,0 +1,355 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle events from Google Cloud Pub/Sub. + * @packageDocumentation + */ + +import { copyIfPresent } from "../../common/encoding"; +import { ResetValue } from "../../common/options"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { CloudEvent, CloudFunction } from "../core"; +import { wrapTraceContext } from "../trace"; +import { Expression } from "../../params"; +import * as options from "../options"; +import { SecretParam } from "../../params/types"; +import { withInit } from "../../common/onInit"; + +/** + * Google Cloud Pub/Sub is a globally distributed message bus that automatically scales as you need it. 
+ * You can create a function ({@link onMessagePublished}) that handles pub/sub events by using functions.pubsub. + * + * This function triggers whenever a new pub/sub message is sent to a specific topic. + * You must specify the Pub/Sub topic name that you want to trigger your function, and set the event within the + * onPublish() event handler. + * + * PubSub Topic: + *
+ *   • A resource that you can publish messages to and then consume those messages via subscriptions.
+ *   • An isolated data stream for pub/sub messages.
+ *   • Messages are published to a topic.
+ *   • Messages are listened to via a subscription.
+ *
  • Each subscription listens to the messages published to exactly one topic. + * + * Subscriptions - Resource that listens to the messages published by exactly one topic. + * + * [More info here](https://firebase.google.com/docs/functions/pubsub-events) + */ + +/** + * Interface representing a Google Cloud Pub/Sub message. + * + * @param data - Payload of a Pub/Sub message. + * @typeParam T - Type representing `Message.data`'s JSON format + */ +export class Message { + /** + * Autogenerated ID that uniquely identifies this message. + */ + readonly messageId: string; + + /** + * Time the message was published + */ + readonly publishTime: string; + + /** + * The data payload of this message object as a base64-encoded string. + */ + readonly data: string; + + /** + * User-defined attributes published with the message, if any. + */ + readonly attributes: { [key: string]: string }; + + /** + * User-defined key used to ensure ordering amongst messages with the same key. + */ + readonly orderingKey: string; + + /** @hidden */ + private _json: T; + + /** + * @hidden + * @alpha + */ + constructor(data: any) { + this.messageId = data.messageId; + this.data = data.data; + this.attributes = data.attributes || {}; + this.orderingKey = data.orderingKey || ""; + this.publishTime = data.publishTime || new Date().toISOString(); + this._json = data.json; + } + + /** + * The JSON data payload of this message object, if any. + */ + get json(): T { + if (typeof this._json === "undefined") { + try { + this._json = JSON.parse(Buffer.from(this.data, "base64").toString("utf8")); + } catch (err) { + throw new Error(`Unable to parse Pub/Sub message data as JSON: ${err.message}`); + } + } + + return this._json; + } + + /** + * Returns a JSON-serializable representation of this object. + * + * @returns A JSON-serializable representation of this object. + */ + toJSON(): any { + const json: Record = { + messageId: this.messageId, + data: this.data, + publishTime: this.publishTime, + }; + if (Object.keys(this.attributes).length) { + json.attributes = this.attributes; + } + if (this.orderingKey) { + json.orderingKey = this.orderingKey; + } + return json; + } +} + +/** + * The interface published in a Pub/Sub publish subscription. + * @typeParam T - Type representing `Message.data`'s JSON format + */ +export interface MessagePublishedData { + /** Google Cloud Pub/Sub message. */ + readonly message: Message; + /** A subscription resource. */ + readonly subscription: string; +} + +/** PubSubOptions extend EventHandlerOptions but must include a topic. */ +export interface PubSubOptions extends options.EventHandlerOptions { + /** The Pub/Sub topic to watch for message events */ + topic: string; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). 
Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** + * Handle a message being published to a Pub/Sub topic. + * @param topic - The Pub/Sub topic to watch for message events. + * @param handler - runs every time a Cloud Pub/Sub message is published + * @typeParam T - Type representing `Message.data`'s JSON format + */ +export function onMessagePublished( + topic: string, + handler: (event: CloudEvent>) => any | Promise +): CloudFunction>>; + +/** + * Handle a message being published to a Pub/Sub topic. + * @param options - Option containing information (topic) for event + * @param handler - runs every time a Cloud Pub/Sub message is published + * @typeParam T - Type representing `Message.data`'s JSON format + */ +export function onMessagePublished( + options: PubSubOptions, + handler: (event: CloudEvent>) => any | Promise +): CloudFunction>>; + +/** + * Handle a message being published to a Pub/Sub topic. 
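+ *
+ * A minimal usage sketch (the topic name and payload shape are assumptions for illustration):
+ *
+ * ```typescript
+ * import { onMessagePublished } from "firebase-functions/v2/pubsub";
+ * import * as logger from "firebase-functions/logger";
+ *
+ * interface OrderPayload { orderId: string; }
+ *
+ * export const onOrderCreated = onMessagePublished<OrderPayload>("orders", (event) => {
+ *   // `json` lazily base64-decodes and JSON-parses the message data.
+ *   logger.info("Received order", { orderId: event.data.message.json.orderId });
+ * });
+ * ```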
+ * @param topicOrOptions - A string representing the PubSub topic or an option (which contains the topic) + * @param handler - runs every time a Cloud Pub/Sub message is published + * @typeParam T - Type representing `Message.data`'s JSON format + */ +export function onMessagePublished( + topicOrOptions: string | PubSubOptions, + handler: (event: CloudEvent>) => any | Promise +): CloudFunction>> { + let topic: string; + let opts: options.EventHandlerOptions; + if (typeof topicOrOptions === "string") { + topic = topicOrOptions; + opts = {}; + } else { + topic = topicOrOptions.topic; + opts = { ...topicOrOptions }; + delete (opts as any).topic; + } + + const func = (raw: CloudEvent) => { + const messagePublishedData = raw.data as { + message: unknown; + subscription: string; + }; + messagePublishedData.message = new Message(messagePublishedData.message); + return wrapTraceContext(withInit(handler))(raw as CloudEvent>); + }; + + func.run = handler; + + Object.defineProperty(func, "__trigger", { + get: () => { + const baseOpts = options.optionsToTriggerAnnotations(options.getGlobalOptions()); + const specificOpts = options.optionsToTriggerAnnotations(opts); + + return { + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType: "google.cloud.pubsub.topic.v1.messagePublished", + resource: `projects/${process.env.GCLOUD_PROJECT}/topics/${topic}`, + }, + }; + }, + }); + + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + const specificOpts = options.optionsToEndpoint(opts); + + const endpoint: ManifestEndpoint = { + ...initV2Endpoint(options.getGlobalOptions(), opts), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType: "google.cloud.pubsub.topic.v1.messagePublished", + eventFilters: { topic }, + retry: opts.retry ?? false, + }, + }; + copyIfPresent(endpoint.eventTrigger, opts, "retry", "retry"); + func.__endpoint = endpoint; + + return func; +} diff --git a/src/v2/providers/remoteConfig.ts b/src/v2/providers/remoteConfig.ts new file mode 100644 index 000000000..a168dff1c --- /dev/null +++ b/src/v2/providers/remoteConfig.ts @@ -0,0 +1,158 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import { withInit } from "../../common/onInit"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { CloudEvent, CloudFunction } from "../core"; +import { EventHandlerOptions, getGlobalOptions, optionsToEndpoint } from "../options"; +import { wrapTraceContext } from "../trace"; + +/** @internal */ +export const eventType = "google.firebase.remoteconfig.remoteConfig.v1.updated"; + +/** All the fields associated with the person/service account that wrote a Remote Config template. */ +export interface ConfigUser { + /** Display name. */ + name: string; + + /** Email address. */ + email: string; + + /** Image URL. */ + imageUrl: string; +} + +/** What type of update was associated with the Remote Config template version. */ +export type ConfigUpdateOrigin = + /** Catch-all for unrecognized values. */ + | "REMOTE_CONFIG_UPDATE_ORIGIN_UNSPECIFIED" + /** The update came from the Firebase UI. */ + | "CONSOLE" + /** The update came from the Remote Config REST API. */ + | "REST_API" + /** The update came from the Firebase Admin Node SDK. */ + | "ADMIN_SDK_NODE"; + +/** Where the Remote Config update action originated. */ +export type ConfigUpdateType = + /** Catch-all for unrecognized enum values */ + | "REMOTE_CONFIG_UPDATE_TYPE_UNSPECIFIED" + /** A regular incremental update */ + | "INCREMENTAL_UPDATE" + /** A forced update. The ETag was specified as "*" in an UpdateRemoteConfigRequest request or the "Force Update" button was pressed on the console */ + | "FORCED_UPDATE" + /** A rollback to a previous Remote Config template */ + | "ROLLBACK"; + +/** The data within Firebase Remote Config update events. */ +export interface ConfigUpdateData { + /** The version number of the version's corresponding Remote Config template. */ + versionNumber: number; + + /** When the Remote Config template was written to the Remote Config server. */ + updateTime: string; + + /** Aggregation of all metadata fields about the account that performed the update. */ + updateUser: ConfigUser; + + /** The user-provided description of the corresponding Remote Config template. */ + description: string; + + /** Where the update action originated. */ + updateOrigin: ConfigUpdateOrigin; + + /** What type of update was made. */ + updateType: ConfigUpdateType; + + /** Only present if this version is the result of a rollback, and will be the version number of the Remote Config template that was rolled-back to. */ + rollbackSource: number; +} + +/** + * Event handler which triggers when data is updated in a Remote Config. + * + * @param handler - Event handler which is run every time a Remote Config update occurs. + * @returns A function that you can export and deploy. + */ +export function onConfigUpdated( + handler: (event: CloudEvent) => any | Promise +): CloudFunction>; + +/** + * Event handler which triggers when data is updated in a Remote Config. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Remote Config update occurs. + * @returns A function that you can export and deploy. + */ +export function onConfigUpdated( + opts: EventHandlerOptions, + handler: (event: CloudEvent) => any | Promise +): CloudFunction>; + +/** + * Event handler which triggers when data is updated in a Remote Config. + * + * @param optsOrHandler - Options or an event handler. + * @param handler - Event handler which is run every time a Remote Config update occurs. + * @returns A function that you can export and deploy. 
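+ *
+ * A minimal sketch of subscribing to Remote Config updates (the handler body is illustrative,
+ * and the import path is assumed to mirror the provider module name):
+ *
+ * ```typescript
+ * import { onConfigUpdated } from "firebase-functions/v2/remoteConfig";
+ * import * as logger from "firebase-functions/logger";
+ *
+ * export const logConfigChange = onConfigUpdated((event) => {
+ *   logger.info("Remote Config updated", {
+ *     version: event.data.versionNumber,
+ *     origin: event.data.updateOrigin,
+ *   });
+ * });
+ * ```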
+ */ +export function onConfigUpdated( + optsOrHandler: + | EventHandlerOptions + | ((event: CloudEvent) => any | Promise), + handler?: (event: CloudEvent) => any | Promise +): CloudFunction> { + if (typeof optsOrHandler === "function") { + handler = optsOrHandler as (event: CloudEvent) => any | Promise; + optsOrHandler = {}; + } + + const baseOpts = optionsToEndpoint(getGlobalOptions()); + const specificOpts = optionsToEndpoint(optsOrHandler); + + const func: any = wrapTraceContext( + withInit((raw: CloudEvent) => { + return handler(raw as CloudEvent); + }) + ); + func.run = handler; + + const ep: ManifestEndpoint = { + ...initV2Endpoint(getGlobalOptions(), optsOrHandler), + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType, + eventFilters: {}, + retry: optsOrHandler.retry ?? false, + }, + }; + func.__endpoint = ep; + + return func; +} diff --git a/src/v2/providers/scheduler.ts b/src/v2/providers/scheduler.ts new file mode 100644 index 000000000..1f8f33c31 --- /dev/null +++ b/src/v2/providers/scheduler.ts @@ -0,0 +1,219 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +import * as express from "express"; + +import { copyIfPresent } from "../../common/encoding"; +import { ResetValue } from "../../common/options"; +import { timezone } from "../../common/timezone"; +import { + initV2Endpoint, + initV2ScheduleTrigger, + ManifestEndpoint, + ManifestRequiredAPI, +} from "../../runtime/manifest"; +import { HttpsFunction } from "./https"; +import { wrapTraceContext } from "../trace"; +import { Expression } from "../../params"; +import * as logger from "../../logger"; +import * as options from "../options"; +import { withInit } from "../../common/onInit"; + +/** @hidden */ +interface SeparatedOpts { + schedule: string | Expression; + timeZone?: timezone | Expression | ResetValue; + retryConfig?: { + retryCount?: number | Expression | ResetValue; + maxRetrySeconds?: number | Expression | ResetValue; + minBackoffSeconds?: number | Expression | ResetValue; + maxBackoffSeconds?: number | Expression | ResetValue; + maxDoublings?: number | Expression | ResetValue; + }; + opts: options.GlobalOptions; +} + +/** @internal */ +export function getOpts(args: string | ScheduleOptions): SeparatedOpts { + if (typeof args === "string") { + return { + schedule: args, + opts: {} as options.GlobalOptions, + }; + } + return { + schedule: args.schedule, + timeZone: args.timeZone, + retryConfig: { + retryCount: args.retryCount, + maxRetrySeconds: args.maxRetrySeconds, + minBackoffSeconds: args.minBackoffSeconds, + maxBackoffSeconds: args.maxBackoffSeconds, + maxDoublings: args.maxDoublings, + }, + opts: args as options.GlobalOptions, + }; +} + +/** + * Interface representing a ScheduleEvent that is passed to the function handler. + */ +export interface ScheduledEvent { + /** + * The Cloud Scheduler job name. + * Populated via the X-CloudScheduler-JobName header. + * + * If invoked manually, this field is undefined. + */ + jobName?: string; + + /** + * For Cloud Scheduler jobs specified in the unix-cron format, + * this is the job schedule time in RFC3339 UTC "Zulu" format. + * Populated via the X-CloudScheduler-ScheduleTime header. + * + * If the schedule is manually triggered, this field will be + * the function execution time. + */ + scheduleTime: string; +} + +/** The Cloud Function type for Schedule triggers. */ +export interface ScheduleFunction extends HttpsFunction { + __requiredAPIs?: ManifestRequiredAPI[]; + run(data: ScheduledEvent): void | Promise; +} + +/** Options that can be set on a Schedule trigger. */ +export interface ScheduleOptions extends options.GlobalOptions { + /** The schedule, in Unix Crontab or AppEngine syntax. */ + schedule: string; + + /** The timezone that the schedule executes in. */ + timeZone?: timezone | Expression | ResetValue; + + /** The number of retry attempts for a failed run. */ + retryCount?: number | Expression | ResetValue; + + /** The time limit for retrying. */ + maxRetrySeconds?: number | Expression | ResetValue; + + /** The minimum time to wait before retying. */ + minBackoffSeconds?: number | Expression | ResetValue; + + /** The maximum time to wait before retrying. */ + maxBackoffSeconds?: number | Expression | ResetValue; + + /** The time between will double max doublings times. */ + maxDoublings?: number | Expression | ResetValue; +} + +/** + * Handler for scheduled functions. Triggered whenever the associated + * scheduler job sends a http request. + * @param schedule - The schedule, in Unix Crontab or AppEngine syntax. + * @param handler - A function to execute when triggered. 
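+ *
+ * A minimal sketch (the crontab string and the handler body are illustrative assumptions):
+ *
+ * ```typescript
+ * import { onSchedule } from "firebase-functions/v2/scheduler";
+ * import * as logger from "firebase-functions/logger";
+ *
+ * export const nightlyCleanup = onSchedule("0 3 * * *", async (event) => {
+ *   // scheduleTime is populated from the Cloud Scheduler request headers.
+ *   logger.info("Scheduled run", { scheduleTime: event.scheduleTime });
+ * });
+ * ```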
+ * @returns A function that you can export and deploy. + */ +export function onSchedule( + schedule: string, + handler: (event: ScheduledEvent) => void | Promise +): ScheduleFunction; + +/** + * Handler for scheduled functions. Triggered whenever the associated + * scheduler job sends a http request. + * @param options - Options to set on scheduled functions. + * @param handler - A function to execute when triggered. + * @returns A function that you can export and deploy. + */ +export function onSchedule( + options: ScheduleOptions, + handler: (event: ScheduledEvent) => void | Promise +): ScheduleFunction; + +/** + * Handler for scheduled functions. Triggered whenever the associated + * scheduler job sends a http request. + * @param args - Either a schedule or an object containing function options. + * @param handler - A function to execute when triggered. + * @returns A function that you can export and deploy. + */ +export function onSchedule( + args: string | ScheduleOptions, + handler: (event: ScheduledEvent) => void | Promise +): ScheduleFunction { + const separatedOpts = getOpts(args); + + const httpFunc = async (req: express.Request, res: express.Response): Promise => { + const event: ScheduledEvent = { + jobName: req.header("X-CloudScheduler-JobName") || undefined, + scheduleTime: req.header("X-CloudScheduler-ScheduleTime") || new Date().toISOString(), + }; + try { + await handler(event); + res.status(200).send(); + } catch (err) { + logger.error((err as Error).message); + res.status(500).send(); + } + }; + const func: any = wrapTraceContext(withInit(httpFunc)); + func.run = handler; + + const globalOpts = options.getGlobalOptions(); + const baseOptsEndpoint = options.optionsToEndpoint(globalOpts); + const specificOptsEndpoint = options.optionsToEndpoint(separatedOpts.opts); + + const ep: ManifestEndpoint = { + ...initV2Endpoint(globalOpts, separatedOpts.opts), + platform: "gcfv2", + ...baseOptsEndpoint, + ...specificOptsEndpoint, + labels: { + ...baseOptsEndpoint?.labels, + ...specificOptsEndpoint?.labels, + }, + scheduleTrigger: initV2ScheduleTrigger(separatedOpts.schedule, globalOpts, separatedOpts.opts), + }; + + copyIfPresent(ep.scheduleTrigger, separatedOpts, "timeZone"); + copyIfPresent( + ep.scheduleTrigger.retryConfig, + separatedOpts.retryConfig, + "retryCount", + "maxRetrySeconds", + "minBackoffSeconds", + "maxBackoffSeconds", + "maxDoublings" + ); + func.__endpoint = ep; + + func.__requiredAPIs = [ + { + api: "cloudscheduler.googleapis.com", + reason: "Needed for scheduled functions.", + }, + ]; + + return func; +} diff --git a/src/v2/providers/storage.ts b/src/v2/providers/storage.ts new file mode 100644 index 000000000..e05187eb1 --- /dev/null +++ b/src/v2/providers/storage.ts @@ -0,0 +1,665 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle events from Google Cloud Storage. + * @packageDocumentation + */ + +import { firebaseConfig } from "../../common/config"; +import { copyIfPresent } from "../../common/encoding"; +import { ResetValue } from "../../common/options"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { CloudEvent, CloudFunction } from "../core"; +import { wrapTraceContext } from "../trace"; +import { Expression } from "../../params"; +import * as options from "../options"; +import { SecretParam } from "../../params/types"; +import { withInit } from "../../common/onInit"; + +/** + * An object within Google Cloud Storage. + * Ref: https://github.com/googleapis/google-cloudevents-nodejs/blob/main/cloud/storage/v1/StorageObjectData.ts + */ +export interface StorageObjectData { + /** + * The name of the bucket containing this object. + */ + bucket: string; + /** + * Cache-Control directive for the object data, matching + * [https://tools.ietf.org/html/rfc7234#section-5.2"][RFC 7234 §5.2]. + */ + cacheControl?: string; + /** + * Number of underlying components that make up this object. Components are + * accumulated by compose operations. + * Attempting to set this field will result in an error. + */ + componentCount?: number; + /** + * Content-Disposition of the object data, matching + * [https://tools.ietf.org/html/rfc6266][RFC 6266]. + */ + contentDisposition?: string; + /** + * Content-Encoding of the object data, matching + * [https://tools.ietf.org/html/rfc7231#section-3.1.2.2][RFC 7231 §3.1.2.2] + */ + contentEncoding?: string; + /** + * Content-Language of the object data, matching + * [https://tools.ietf.org/html/rfc7231#section-3.1.3.2][RFC 7231 §3.1.3.2]. + */ + contentLanguage?: string; + /** + * Content-Type of the object data, matching + * [https://tools.ietf.org/html/rfc7231#section-3.1.1.5][RFC 7231 §3.1.1.5]. + * If an object is stored without a Content-Type, it is served as + * `application/octet-stream`. + */ + contentType?: string; + /** + * CRC32c checksum. For more information about using the CRC32c + * checksum, see + * [https://cloud.google.com/storage/docs/hashes-etags#_JSONAPI][Hashes and + * ETags: Best Practices]. + */ + crc32c?: string; + /** + * Metadata of customer-supplied encryption key, if the object is encrypted by + * such a key. + */ + customerEncryption?: CustomerEncryption; + /** + * HTTP 1.1 Entity tag for the object. See + * [https://tools.ietf.org/html/rfc7232#section-2.3][RFC 7232 §2.3]. + */ + etag?: string; + /** + * The content generation of this object. Used for object versioning. + * Attempting to set this field will result in an error. + */ + generation: number; + /** + * The ID of the object, including the bucket name, object name, and + * generation number. + */ + id: string; + /** + * The kind of item this is. For objects, this is always "storage#object". 
+ */ + kind?: string; + /** + * MD5 hash of the data; encoded using base64 as per + * [https://tools.ietf.org/html/rfc4648#section-4][RFC 4648 §4]. For more + * information about using the MD5 hash, see + * [https://cloud.google.com/storage/docs/hashes-etags#_JSONAPI][Hashes and + * ETags: Best Practices]. + */ + md5Hash?: string; + /** + * Media download link. + */ + mediaLink?: string; + /** + * User-provided metadata, in key/value pairs. + */ + metadata?: { [key: string]: string }; + /** + * The version of the metadata for this object at this generation. Used for + * preconditions and for detecting changes in metadata. A metageneration + * number is only meaningful in the context of a particular generation of a + * particular object. + */ + metageneration: number; + /** + * The name of the object. + */ + name: string; + /** + * The link to this object. + */ + selfLink?: string; + /** + * Content-Length of the object data in bytes, matching + * [https://tools.ietf.org/html/rfc7230#section-3.3.2][RFC 7230 §3.3.2]. + */ + size: number; + /** + * Storage class of the object. + */ + storageClass: string; + /** + * The creation time of the object. + * Attempting to set this field will result in an error. + */ + timeCreated?: Date | string; + /** + * The deletion time of the object. Will be returned if and only if this + * version of the object has been deleted. + */ + timeDeleted?: Date | string; + /** + * The time at which the object's storage class was last changed. + */ + timeStorageClassUpdated?: Date | string; + /** + * The modification time of the object metadata. + */ + updated?: Date | string; +} + +/** + * Metadata of customer-supplied encryption key, if the object is encrypted by + * such a key. + */ +export interface CustomerEncryption { + /** + * The encryption algorithm. + */ + encryptionAlgorithm?: string; + /** + * SHA256 hash value of the encryption key. + */ + keySha256?: string; +} + +/** A CloudEvent that contains StorageObjectData */ +export interface StorageEvent extends CloudEvent { + /** The name of the bucket containing this object. */ + bucket: string; +} + +/** @internal */ +export const archivedEvent = "google.cloud.storage.object.v1.archived"; +/** @internal */ +export const finalizedEvent = "google.cloud.storage.object.v1.finalized"; +/** @internal */ +export const deletedEvent = "google.cloud.storage.object.v1.deleted"; +/** @internal */ +export const metadataUpdatedEvent = "google.cloud.storage.object.v1.metadataUpdated"; + +/** StorageOptions extend EventHandlerOptions with a bucket name */ +export interface StorageOptions extends options.EventHandlerOptions { + /** The name of the bucket containing this object. */ + bucket?: string | Expression; + + /** + * If true, do not deploy or emulate this function. + */ + omit?: boolean | Expression; + + /** + * Region where functions should be deployed. + */ + region?: options.SupportedRegion | string | Expression | ResetValue; + + /** + * Amount of memory to allocate to a function. + */ + memory?: options.MemoryOption | Expression | ResetValue; + + /** + * Timeout for the function in seconds, possible values are 0 to 540. + * HTTPS functions can specify a higher timeout. + * + * @remarks + * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a + * function depends on the type of function: Event handling functions have a + * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a + * maximum timeout of 3,600s (1 hour). 
Task queue functions have a maximum + * timeout of 1,800s (30 minutes) + */ + timeoutSeconds?: number | Expression | ResetValue; + + /** + * Min number of actual instances to be running at a given time. + * + * @remarks + * Instances will be billed for memory allocation and 10% of CPU allocation + * while idle. + */ + minInstances?: number | Expression | ResetValue; + + /** + * Max number of instances to be running in parallel. + */ + maxInstances?: number | Expression | ResetValue; + + /** + * Number of requests a function can serve at once. + * + * @remarks + * Can only be applied to functions running on Cloud Functions v2. + * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise). + * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1. + * The maximum value for concurrency is 1,000. + */ + concurrency?: number | Expression | ResetValue; + + /** + * Fractional number of CPUs to allocate to a function. + * + * @remarks + * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes. + * This is different from the defaults when using the gcloud utility and is different from + * the fixed amount assigned in Google Cloud Functions generation 1. + * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this + * to the value "gcf_gen1" + */ + cpu?: number | "gcf_gen1"; + + /** + * Connect cloud function to specified VPC connector. + */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean | Expression | ResetValue; +} + +/** + * Event handler sent only when a bucket has enabled object versioning. + * This event indicates that the live version of an object has become an + * archived version, either because it was archived or because it was + * overwritten by the upload of an object of the same name. + * + * @param handler - Event handler which is run every time a Google Cloud Storage archival occurs. + */ +export function onObjectArchived( + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler sent only when a bucket has enabled object versioning. + * This event indicates that the live version of an object has become an + * archived version, either because it was archived or because it was + * overwritten by the upload of an object of the same name. + * + * @param bucket - The name of the bucket containing this object. + * @param handler - Event handler which is run every time a Google Cloud Storage archival occurs. + */ +export function onObjectArchived( + bucket: string | Expression, + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler sent only when a bucket has enabled object versioning. + * This event indicates that the live version of an object has become an + * archived version, either because it was archived or because it was + * overwritten by the upload of an object of the same name. 
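+ *
+ * A minimal sketch (the bucket name is an assumption for illustration):
+ *
+ * ```typescript
+ * import { onObjectArchived } from "firebase-functions/v2/storage";
+ * import * as logger from "firebase-functions/logger";
+ *
+ * export const logArchivedObject = onObjectArchived({ bucket: "my-versioned-bucket" }, (event) => {
+ *   logger.info("Object archived", { name: event.data.name, generation: event.data.generation });
+ * });
+ * ```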
+ * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Google Cloud Storage archival occurs. + */ +export function onObjectArchived( + opts: StorageOptions, + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler sent only when a bucket has enabled object versioning. + * This event indicates that the live version of an object has become an + * archived version, either because it was archived or because it was + * overwritten by the upload of an object of the same name. + * + * @param bucketOrOptsOrHandler - Options or string that may (or may not) define the bucket to be used. + * @param handler - Event handler which is run every time a Google Cloud Storage archival occurs. + */ +export function onObjectArchived( + bucketOrOptsOrHandler: + | string + | Expression + | StorageOptions + | ((event: StorageEvent) => any | Promise), + handler?: (event: StorageEvent) => any | Promise +): CloudFunction { + return onOperation(archivedEvent, bucketOrOptsOrHandler, handler); +} + +/** + * Event handler which fires every time a Google Cloud Storage object + * creation occurs. + * + * Sent when a new object (or a new generation of an existing object) + * is successfully created in the bucket. This includes copying or rewriting + * an existing object. A failed upload does not trigger this event. + * + * @param handler - Event handler which is run every time a Google Cloud Storage object creation occurs. + */ +export function onObjectFinalized( + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler which fires every time a Google Cloud Storage object + * creation occurs. + * + * Sent when a new object (or a new generation of an existing object) + * is successfully created in the bucket. This includes copying or rewriting + * an existing object. A failed upload does not trigger this event. + * + * @param bucket - The name of the bucket containing this object. + * @param handler - Event handler which is run every time a Google Cloud Storage object creation occurs. + */ +export function onObjectFinalized( + bucket: string | Expression, + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler which fires every time a Google Cloud Storage object + * creation occurs. + * + * Sent when a new object (or a new generation of an existing object) + * is successfully created in the bucket. This includes copying or rewriting + * an existing object. A failed upload does not trigger this event. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Google Cloud Storage object creation occurs. + */ +export function onObjectFinalized( + opts: StorageOptions, + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler which fires every time a Google Cloud Storage object + * creation occurs. + * + * Sent when a new object (or a new generation of an existing object) + * is successfully created in the bucket. This includes copying or rewriting + * an existing object. A failed upload does not trigger this event. + * + * @param bucketOrOptsOrHandler - Options or string that may (or may not) define the bucket to be used. + * @param handler - Event handler which is run every time a Google Cloud Storage object creation occurs. 
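+ *
+ * A minimal sketch (the content-type check is illustrative, not part of this change):
+ *
+ * ```typescript
+ * import { onObjectFinalized } from "firebase-functions/v2/storage";
+ * import * as logger from "firebase-functions/logger";
+ *
+ * export const onUpload = onObjectFinalized((event) => {
+ *   if (!event.data.contentType?.startsWith("image/")) {
+ *     logger.info("Skipping non-image upload", { name: event.data.name });
+ *     return;
+ *   }
+ *   logger.info("New image uploaded", { bucket: event.data.bucket, name: event.data.name });
+ * });
+ * ```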
+ */ +export function onObjectFinalized( + bucketOrOptsOrHandler: + | string + | Expression + | StorageOptions + | ((event: StorageEvent) => any | Promise), + handler?: (event: StorageEvent) => any | Promise +): CloudFunction { + return onOperation(finalizedEvent, bucketOrOptsOrHandler, handler); +} + +/** + * Event handler which fires every time a Google Cloud Storage deletion occurs. + * + * Sent when an object has been permanently deleted. This includes objects + * that are overwritten or are deleted as part of the bucket's lifecycle + * configuration. For buckets with object versioning enabled, this is not + * sent when an object is archived, even if archival occurs + * via the `storage.objects.delete` method. + * + * @param handler - Event handler which is run every time a Google Cloud Storage object deletion occurs. + */ +export function onObjectDeleted( + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler which fires every time a Google Cloud Storage deletion occurs. + * + * Sent when an object has been permanently deleted. This includes objects + * that are overwritten or are deleted as part of the bucket's lifecycle + * configuration. For buckets with object versioning enabled, this is not + * sent when an object is archived, even if archival occurs + * via the `storage.objects.delete` method. + * + * @param bucket - The name of the bucket containing this object. + * @param handler - Event handler which is run every time a Google Cloud Storage object deletion occurs. + */ +export function onObjectDeleted( + bucket: string | Expression, + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler which fires every time a Google Cloud Storage deletion occurs. + * + * Sent when an object has been permanently deleted. This includes objects + * that are overwritten or are deleted as part of the bucket's lifecycle + * configuration. For buckets with object versioning enabled, this is not + * sent when an object is archived, even if archival occurs + * via the `storage.objects.delete` method. + * + * @param opts - Options that can be set on an individual event-handling function. + * @param handler - Event handler which is run every time a Google Cloud Storage object deletion occurs. + */ +export function onObjectDeleted( + opts: StorageOptions, + handler: (event: StorageEvent) => any | Promise +): CloudFunction; + +/** + * Event handler which fires every time a Google Cloud Storage deletion occurs. + * + * Sent when an object has been permanently deleted. This includes objects + * that are overwritten or are deleted as part of the bucket's lifecycle + * configuration. For buckets with object versioning enabled, this is not + * sent when an object is archived, even if archival occurs + * via the `storage.objects.delete` method. + * + * @param bucketOrOptsOrHandler - Options or string that may (or may not) define the bucket to be used. + * @param handler - Event handler which is run every time a Google Cloud Storage object deletion occurs. + */ +export function onObjectDeleted( + bucketOrOptsOrHandler: + | string + | Expression + | StorageOptions + | ((event: StorageEvent) => any | Promise), + handler?: (event: StorageEvent) => any | Promise +): CloudFunction { + return onOperation(deletedEvent, bucketOrOptsOrHandler, handler); +} + +/** + * Event handler which fires every time the metadata of an existing object + * changes. 
+ *
+ * @param handler - Event handler which is run every time a Google Cloud Storage object metadata update occurs.
+ */
+export function onObjectMetadataUpdated(
+  handler: (event: StorageEvent) => any | Promise<any>
+): CloudFunction<StorageEvent>;
+
+/**
+ * Event handler which fires every time the metadata of an existing object
+ * changes.
+ *
+ * @param bucket - The name of the bucket containing this object.
+ * @param handler - Event handler which is run every time a Google Cloud Storage object metadata update occurs.
+ */
+export function onObjectMetadataUpdated(
+  bucket: string | Expression<string>,
+  handler: (event: StorageEvent) => any | Promise<any>
+): CloudFunction<StorageEvent>;
+
+/**
+ * Event handler which fires every time the metadata of an existing object
+ * changes.
+ *
+ * @param opts - Options that can be set on an individual event-handling function.
+ * @param handler - Event handler which is run every time a Google Cloud Storage object metadata update occurs.
+ */
+export function onObjectMetadataUpdated(
+  opts: StorageOptions,
+  handler: (event: StorageEvent) => any | Promise<any>
+): CloudFunction<StorageEvent>;
+
+/**
+ * Event handler which fires every time the metadata of an existing object
+ * changes.
+ *
+ * @param bucketOrOptsOrHandler - Options or string that may (or may not) define the bucket to be used.
+ * @param handler - Event handler which is run every time a Google Cloud Storage object metadata update occurs.
+ */
+export function onObjectMetadataUpdated(
+  bucketOrOptsOrHandler:
+    | string
+    | Expression<string>
+    | StorageOptions
+    | ((event: StorageEvent) => any | Promise<any>),
+  handler?: (event: StorageEvent) => any | Promise<any>
+): CloudFunction<StorageEvent> {
+  return onOperation(metadataUpdatedEvent, bucketOrOptsOrHandler, handler);
+}
+
+/** @internal */
+export function onOperation(
+  eventType: string,
+  bucketOrOptsOrHandler:
+    | string
+    | Expression<string>
+    | StorageOptions
+    | ((event: StorageEvent) => any | Promise<any>),
+  handler: (event: StorageEvent) => any | Promise<any>
+): CloudFunction<StorageEvent> {
+  if (typeof bucketOrOptsOrHandler === "function") {
+    handler = bucketOrOptsOrHandler as (event: StorageEvent) => any | Promise<any>;
+    bucketOrOptsOrHandler = {};
+  }
+
+  const [opts, bucket] = getOptsAndBucket(bucketOrOptsOrHandler);
+
+  const func = (raw: CloudEvent<unknown>) => {
+    return wrapTraceContext(withInit(handler))(raw as StorageEvent);
+  };
+
+  func.run = handler;
+
+  Object.defineProperty(func, "__trigger", {
+    get: () => {
+      const baseOpts = options.optionsToTriggerAnnotations(options.getGlobalOptions());
+      const specificOpts = options.optionsToTriggerAnnotations(opts);
+
+      return {
+        platform: "gcfv2",
+        ...baseOpts,
+        ...specificOpts,
+        labels: {
+          ...baseOpts?.labels,
+          ...specificOpts?.labels,
+        },
+        eventTrigger: {
+          eventType,
+          resource: bucket, // TODO(colerogers): replace with 'bucket,' eventually
+        },
+      };
+    },
+  });
+
+  // TypeScript doesn't recognize defineProperty as adding a property and complains
+  // that __endpoint doesn't exist. We can either cast to any and lose all type safety
+  // or we can just assign a meaningless value before calling defineProperty.
+  func.__endpoint = {} as ManifestEndpoint;
+
+  // The SDK may attempt to read the FIREBASE_CONFIG env var to fetch the default bucket name.
+  // To prevent runtime errors when the FIREBASE_CONFIG env var is missing, we use getters.
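+  // The __endpoint getter mirrors the __trigger getter above, but produces the v2 manifest
+  // shape (bucket as an event filter plus an explicit retry flag) consumed by the CLI.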
+ Object.defineProperty(func, "__endpoint", { + get: () => { + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + const specificOpts = options.optionsToEndpoint(opts); + + const endpoint: ManifestEndpoint = { + platform: "gcfv2", + ...initV2Endpoint(options.getGlobalOptions(), opts), + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + eventTrigger: { + eventType, + eventFilters: { bucket }, + retry: opts.retry ?? false, + }, + }; + copyIfPresent(endpoint.eventTrigger, opts, "retry", "retry"); + return endpoint; + }, + }); + + return func; +} + +/** @internal */ +export function getOptsAndBucket( + bucketOrOpts: string | Expression | StorageOptions +): [options.EventHandlerOptions, string | Expression] { + let bucket: string | Expression; + let opts: options.EventHandlerOptions; + // If bucket is a string or Expression + if (typeof bucketOrOpts === "string" || "value" in bucketOrOpts) { + bucket = bucketOrOpts; + opts = {}; + } else { + bucket = bucketOrOpts.bucket || firebaseConfig()?.storageBucket; + opts = { ...bucketOrOpts }; + delete (opts as any).bucket; + } + + if (!bucket) { + throw new Error( + "Missing bucket name. If you are unit testing, please provide a bucket name" + + " by providing bucket name directly in the event handler or by setting process.env.FIREBASE_CONFIG." + ); + } + if (typeof bucket === "string" && !/^[a-z\d][a-z\d\\._-]{1,230}[a-z\d]$/.test(bucket)) { + throw new Error(`Invalid bucket name ${bucket}`); + } + + return [opts, bucket]; +} diff --git a/src/v2/providers/tasks.ts b/src/v2/providers/tasks.ts new file mode 100644 index 000000000..e4e0ca127 --- /dev/null +++ b/src/v2/providers/tasks.ts @@ -0,0 +1,295 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +/** + * Cloud functions to handle Tasks enqueued with Google Cloud Tasks. 
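+ *
+ * @example
+ * // A minimal sketch of a task queue function. The export name, payload shape, and
+ * // option values below are illustrative assumptions, not part of this change.
+ * import { onTaskDispatched } from "firebase-functions/v2/tasks";
+ *
+ * export const processItem = onTaskDispatched<{ id: string }>(
+ *   {
+ *     retryConfig: { maxAttempts: 3, minBackoffSeconds: 10 },
+ *     rateLimits: { maxConcurrentDispatches: 5 },
+ *   },
+ *   async (req) => {
+ *     console.log("processing", req.data.id);
+ *   }
+ * );
+ *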
+ * @packageDocumentation
+ */
+
+import { convertIfPresent, convertInvoker, copyIfPresent } from "../../common/encoding";
+import { ResetValue } from "../../common/options";
+import {
+  AuthData,
+  onDispatchHandler,
+  RateLimits,
+  Request,
+  RetryConfig,
+} from "../../common/providers/tasks";
+import * as options from "../options";
+import { wrapTraceContext } from "../trace";
+import { HttpsFunction } from "./https";
+import { Expression } from "../../params";
+import { SecretParam } from "../../params/types";
+import { initV2Endpoint, initTaskQueueTrigger } from "../../runtime/manifest";
+import { withInit } from "../../common/onInit";
+
+export type { AuthData, Request, RateLimits, RetryConfig };
+
+export interface TaskQueueOptions extends options.EventHandlerOptions {
+  /** How a task should be retried in the event of a non-2xx return. */
+  retryConfig?: RetryConfig;
+
+  /** How congestion control should be applied to the function. */
+  rateLimits?: RateLimits;
+
+  /**
+   * Who can enqueue tasks for this function.
+   *
+   * @remarks
+   * If left unspecified, only service accounts which have
+   * `roles/cloudtasks.enqueuer` and `roles/cloudfunctions.invoker`
+   * will have permissions.
+   */
+  invoker?: "private" | string | string[];
+
+  /**
+   * If true, do not deploy or emulate this function.
+   */
+  omit?: boolean | Expression<boolean>;
+
+  /**
+   * Region where functions should be deployed.
+   */
+  region?: options.SupportedRegion | string | Expression<string> | ResetValue;
+
+  /**
+   * Amount of memory to allocate to a function.
+   */
+  memory?: options.MemoryOption | Expression<number> | ResetValue;
+
+  /**
+   * Timeout for the function in seconds, possible values are 0 to 540.
+   * HTTPS functions can specify a higher timeout.
+   *
+   * @remarks
+   * The minimum timeout for a gen 2 function is 1s. The maximum timeout for a
+   * function depends on the type of function: Event handling functions have a
+   * maximum timeout of 540s (9 minutes). HTTPS and callable functions have a
+   * maximum timeout of 3,600s (1 hour). Task queue functions have a maximum
+   * timeout of 1,800s (30 minutes).
+   */
+  timeoutSeconds?: number | Expression<number> | ResetValue;
+
+  /**
+   * Minimum number of actual instances to be running at a given time.
+   *
+   * @remarks
+   * Instances will be billed for memory allocation and 10% of CPU allocation
+   * while idle.
+   */
+  minInstances?: number | Expression<number> | ResetValue;
+
+  /**
+   * Max number of instances to be running in parallel.
+   */
+  maxInstances?: number | Expression<number> | ResetValue;
+
+  /**
+   * Number of requests a function can serve at once.
+   *
+   * @remarks
+   * Can only be applied to functions running on Cloud Functions v2.
+   * A value of null restores the default concurrency (80 when CPU >= 1, 1 otherwise).
+   * Concurrency cannot be set to any value other than 1 if `cpu` is less than 1.
+   * The maximum value for concurrency is 1,000.
+   */
+  concurrency?: number | Expression<number> | ResetValue;
+
+  /**
+   * Fractional number of CPUs to allocate to a function.
+   *
+   * @remarks
+   * Defaults to 1 for functions with <= 2GB RAM and increases for larger memory sizes.
+   * This is different from the defaults when using the gcloud utility and is different from
+   * the fixed amount assigned in Google Cloud Functions generation 1.
+   * To revert to the CPU amounts used in gcloud or in Cloud Functions generation 1, set this
+   * to the value "gcf_gen1".
+   */
+  cpu?: number | "gcf_gen1";
+
+  /**
+   * Connect cloud function to specified VPC connector.
+ */ + vpcConnector?: string | Expression | ResetValue; + + /** + * Egress settings for VPC connector. + */ + vpcConnectorEgressSettings?: options.VpcEgressSetting | ResetValue; + + /** + * Specific service account for the function to run as. + */ + serviceAccount?: string | Expression | ResetValue; + + /** + * Ingress settings which control where this function can be called from. + */ + ingressSettings?: options.IngressSetting | ResetValue; + + /** + * User labels to set on the function. + */ + labels?: Record; + + /* + * Secrets to bind to a function. + */ + secrets?: (string | SecretParam)[]; + + /** Whether failed executions should be delivered again. */ + retry?: boolean; +} + +/** + * A handler for tasks. + * @typeParam T - The task data interface. Task data is unmarshaled from JSON. + */ +export interface TaskQueueFunction extends HttpsFunction { + /** + * The callback passed to the `TaskQueueFunction` constructor. + * @param request - A TaskRequest containing data and auth information. + * @returns Any return value. Google Cloud Functions will await any promise + * before shutting down your function. Resolved return values + * are only used for unit testing purposes. + */ + run(request: Request): void | Promise; +} + +/** + * Creates a handler for tasks sent to a Google Cloud Tasks queue. + * @param handler - A callback to handle task requests. + * @typeParam Args - The interface for the request's `data` field. + * @returns A function you can export and deploy. + */ +export function onTaskDispatched( + handler: (request: Request) => void | Promise +): TaskQueueFunction; + +/** + * Creates a handler for tasks sent to a Google Cloud Tasks queue. + * @param options - Configuration for the task queue or Cloud Function. + * @param handler - A callback to handle task requests. + * @typeParam Args - The interface for the request's `data` field. + * @returns A function you can export and deploy. + */ +export function onTaskDispatched( + options: TaskQueueOptions, + handler: (request: Request) => void | Promise +): TaskQueueFunction; +export function onTaskDispatched( + optsOrHandler: TaskQueueOptions | ((request: Request) => void | Promise), + handler?: (request: Request) => void | Promise +): TaskQueueFunction { + let opts: TaskQueueOptions; + if (arguments.length === 1) { + opts = {}; + handler = optsOrHandler as (request: Request) => void | Promise; + } else { + opts = optsOrHandler as TaskQueueOptions; + } + + // onDispatchHandler sniffs the function length to determine which API to present. + // fix the length to prevent api versions from being mismatched. + const fixedLen = (req: Request) => handler(req); + const func: any = wrapTraceContext(withInit(onDispatchHandler(fixedLen))); + + Object.defineProperty(func, "__trigger", { + get: () => { + const baseOpts = options.optionsToTriggerAnnotations(options.getGlobalOptions()); + // global options calls region a scalar and https allows it to be an array, + // but optionsToTriggerAnnotations handles both cases. 
+ const specificOpts = options.optionsToTriggerAnnotations(opts as options.GlobalOptions); + const taskQueueTrigger: Record = {}; + copyIfPresent(taskQueueTrigger, opts, "retryConfig", "rateLimits"); + convertIfPresent( + taskQueueTrigger, + options.getGlobalOptions(), + "invoker", + "invoker", + convertInvoker + ); + convertIfPresent(taskQueueTrigger, opts, "invoker", "invoker", convertInvoker); + return { + platform: "gcfv2", + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + taskQueueTrigger, + }; + }, + }); + + const baseOpts = options.optionsToEndpoint(options.getGlobalOptions()); + // global options calls region a scalar and https allows it to be an array, + // but optionsToManifestEndpoint handles both cases. + const specificOpts = options.optionsToEndpoint(opts as options.GlobalOptions); + + func.__endpoint = { + platform: "gcfv2", + ...initV2Endpoint(options.getGlobalOptions(), opts), + ...baseOpts, + ...specificOpts, + labels: { + ...baseOpts?.labels, + ...specificOpts?.labels, + }, + taskQueueTrigger: initTaskQueueTrigger(options.getGlobalOptions(), opts), + }; + + copyIfPresent( + func.__endpoint.taskQueueTrigger.retryConfig, + opts.retryConfig, + "maxAttempts", + "maxBackoffSeconds", + "maxDoublings", + "maxRetrySeconds", + "minBackoffSeconds" + ); + copyIfPresent( + func.__endpoint.taskQueueTrigger.rateLimits, + opts.rateLimits, + "maxConcurrentDispatches", + "maxDispatchesPerSecond" + ); + convertIfPresent( + func.__endpoint.taskQueueTrigger, + options.getGlobalOptions(), + "invoker", + "invoker", + convertInvoker + ); + convertIfPresent(func.__endpoint.taskQueueTrigger, opts, "invoker", "invoker", convertInvoker); + + func.__requiredAPIs = [ + { + api: "cloudtasks.googleapis.com", + reason: "Needed for task queue functions", + }, + ]; + + func.run = handler; + return func; +} diff --git a/src/v2/providers/testLab.ts b/src/v2/providers/testLab.ts new file mode 100644 index 000000000..3b4e5a3c1 --- /dev/null +++ b/src/v2/providers/testLab.ts @@ -0,0 +1,216 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
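+
+/**
+ * Cloud functions to handle Firebase Test Lab events.
+ */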
+ +import { withInit } from "../../common/onInit"; +import { initV2Endpoint, ManifestEndpoint } from "../../runtime/manifest"; +import { CloudEvent, CloudFunction } from "../core"; +import { EventHandlerOptions, getGlobalOptions, optionsToEndpoint } from "../options"; +import { wrapTraceContext } from "../trace"; + +/** @internal */ +export const eventType = "google.firebase.testlab.testMatrix.v1.completed"; + +/** Possible test states for a test matrix. */ +export type TestState = + /** The default value. This value is used if the state is omitted. */ + | "TEST_STATE_UNSPECIFIED" + + /** The test matrix is being validated. */ + | "VALIDATING" + + /** The test matrix is waiting for resources to become available. */ + | "PENDING" + + /** The test matrix has completed normally. */ + | "FINISHED" + + /** The test matrix has completed because of an infrastructure failure. */ + | "ERROR" + + /** The test matrix was not run because the provided inputs are not valid. */ + | "INVALID"; + +/** Outcome summary for a finished test matrix. */ +export type OutcomeSummary = + /** The default value. This value is used if the state is omitted. */ + | "OUTCOME_SUMMARY_UNSPECIFIED" + + /** + * The test matrix run was successful, for instance: + * - All test cases passed. + * - No crash of the application under test was detected. + */ + | "SUCCESS" + + /** + * A run failed, for instance: + * - One or more test case failed. + * - A test timed out. + * - The application under test crashed. + */ + | "FAILURE" + + /** + * Something unexpected happened. The test run should still be considered + * unsuccessful but this is likely a transient problem and re-running the + * test might be successful. + */ + | "INCONCLUSIVE" + + /** All tests were skipped. */ + | "SKIPPED"; + +/** Locations where test results are stored. */ +export interface ResultStorage { + /** + * Tool Results history resource containing test results. Format is + * `projects/{project_id}/histories/{history_id}`. + * See https://firebase.google.com/docs/test-lab/reference/toolresults/rest + * for more information. + */ + toolResultsHistory: string; + + /** + * Tool Results execution resource containing test results. Format is + * `projects/{project_id}/histories/{history_id}/executions/{execution_id}`. + * Optional, can be omitted in erroneous test states. + * See https://firebase.google.com/docs/test-lab/reference/toolresults/rest + * for more information. + */ + toolResultsExecution: string; + + /** URI to the test results in the Firebase Web Console. */ + resultsUri: string; + + /** + * Location in Google Cloud Storage where test results are written to. + * In the form "gs://bucket/path/to/somewhere". + */ + gcsPath: string; +} + +/** Information about the client which invoked the test. */ +export interface ClientInfo { + /** Client name, such as "gcloud". */ + client: string; + + /** Map of detailed information about the client. */ + details: Record; +} + +/** The data within all Firebase test matrix completed events. */ +export interface TestMatrixCompletedData { + /** Time the test matrix was created. */ + createTime: string; + + /** State of the test matrix. */ + state: TestState; + + /** + * Code that describes why the test matrix is considered invalid. Only set for + * matrices in the INVALID state. + */ + invalidMatrixDetails: string; + + /** Outcome summary of the test matrix. */ + outcomeSummary: OutcomeSummary; + + /** Locations where test results are stored. 
   */
+  resultStorage: ResultStorage;
+
+  /** Information provided by the client that created the test matrix. */
+  clientInfo: ClientInfo;
+
+  /** ID of the test matrix this event belongs to. */
+  testMatrixId: string;
+}
+
+/**
+ * Event handler which triggers when a Firebase test matrix completes.
+ *
+ * @param handler - Event handler which is run every time a Firebase test matrix completes.
+ * @returns A Cloud Function that you can export and deploy.
+ * @alpha
+ */
+export function onTestMatrixCompleted(
+  handler: (event: CloudEvent<TestMatrixCompletedData>) => any | Promise<any>
+): CloudFunction<CloudEvent<TestMatrixCompletedData>>;
+
+/**
+ * Event handler which triggers when a Firebase test matrix completes.
+ *
+ * @param opts - Options that can be set on an individual event-handling function.
+ * @param handler - Event handler which is run every time a Firebase test matrix completes.
+ * @returns A Cloud Function that you can export and deploy.
+ * @alpha
+ */
+export function onTestMatrixCompleted(
+  opts: EventHandlerOptions,
+  handler: (event: CloudEvent<TestMatrixCompletedData>) => any | Promise<any>
+): CloudFunction<CloudEvent<TestMatrixCompletedData>>;
+
+/**
+ * Event handler which triggers when a Firebase test matrix completes.
+ *
+ * @param optsOrHandler - Options or an event handler.
+ * @param handler - Event handler which is run every time a Firebase test matrix completes.
+ * @returns A Cloud Function that you can export and deploy.
+ * @alpha
+ */
+export function onTestMatrixCompleted(
+  optsOrHandler:
+    | EventHandlerOptions
+    | ((event: CloudEvent<TestMatrixCompletedData>) => any | Promise<any>),
+  handler?: (event: CloudEvent<TestMatrixCompletedData>) => any | Promise<any>
+): CloudFunction<CloudEvent<TestMatrixCompletedData>> {
+  if (typeof optsOrHandler === "function") {
+    handler = optsOrHandler as (event: CloudEvent<TestMatrixCompletedData>) => any | Promise<any>;
+    optsOrHandler = {};
+  }
+
+  const baseOpts = optionsToEndpoint(getGlobalOptions());
+  const specificOpts = optionsToEndpoint(optsOrHandler);
+
+  const func: any = (raw: CloudEvent<unknown>) => {
+    return wrapTraceContext(withInit(handler))(raw as CloudEvent<TestMatrixCompletedData>);
+  };
+  func.run = handler;
+
+  const ep: ManifestEndpoint = {
+    ...initV2Endpoint(getGlobalOptions(), optsOrHandler),
+    platform: "gcfv2",
+    ...baseOpts,
+    ...specificOpts,
+    labels: {
+      ...baseOpts?.labels,
+      ...specificOpts?.labels,
+    },
+    eventTrigger: {
+      eventType,
+      eventFilters: {},
+      retry: optsOrHandler.retry ?? false,
+    },
+  };
+  func.__endpoint = ep;
+
+  return func;
+}
diff --git a/src/v2/trace.ts b/src/v2/trace.ts
new file mode 100644
index 000000000..585686b89
--- /dev/null
+++ b/src/v2/trace.ts
@@ -0,0 +1,33 @@
+import * as express from "express";
+
+import { TraceContext, extractTraceContext, traceContext } from "../common/trace";
+import { CloudEvent } from "./core";
+
+type HttpsFunction = (req: express.Request, res: express.Response) => void | Promise<void>;
+type CloudEventFunction = (raw: CloudEvent<unknown>) => any | Promise<any>;
+
+/**
+ * Wraps v2 handler with trace context.
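+ *
+ * The trace parent is extracted from an Express request's headers (for HTTPS handlers)
+ * or from the raw CloudEvent (for event handlers), and the handler runs inside that
+ * trace context.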
+ * @param handler + * + * @internal + */ +export function wrapTraceContext(handler: HttpsFunction): HttpsFunction; +export function wrapTraceContext(handler: CloudEventFunction): CloudEventFunction; +export function wrapTraceContext( + handler: HttpsFunction | CloudEventFunction +): HttpsFunction | CloudEventFunction { + return (...args) => { + let traceParent: TraceContext | undefined; + if (args.length === 1) { + traceParent = extractTraceContext(args[0]); + } else { + traceParent = extractTraceContext(args[0].headers); + } + if (!traceParent) { + // eslint-disable-next-line prefer-spread + return handler.apply(null, args); + } + return traceContext.run(traceParent, handler, ...args); + }; +} diff --git a/testing/README.md b/testing/README.md deleted file mode 100644 index aea2fe460..000000000 --- a/testing/README.md +++ /dev/null @@ -1,12 +0,0 @@ -### Cloud Functions for Firebase testing utilities - -This module is the access point for testing-only utilities for Cloud Functions -for Firebase. We discourage relying on code in this module for your production -application, but encourage using the utilities found here to write effective -unit tests. - -This testing module can be accessed with the following import: - -`import * as testing from 'firebase-functions/testing';` - -TODO(rjh): document testing utilities included here. diff --git a/testing/package.json b/testing/package.json deleted file mode 100644 index d9080a89c..000000000 --- a/testing/package.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "firebase-functions-testing", - "version": "0.4.1", - "description": "Node helpers for Firebase Functions unit testing. See ../package.json for more information", - "main": "../lib/testing.js", - "author": "Firebase Team", - "license": "MIT", - "typings": "../lib/testing.d.ts" -} diff --git a/tsconfig.json b/tsconfig.json index 573a354fe..b321cbca9 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,17 +1,13 @@ { "compilerOptions": { - "lib": ["es6"], - "module": "commonjs", - "noImplicitAny": true, - "outDir": ".tmp", + "resolveJsonModule": true, "sourceMap": true, - "target": "es6", - "typeRoots": [ - "node_modules/@types" - ] + "emitDeclarationOnly": false }, + "extends": "./tsconfig.release.json", "include": [ - "src/**/*.ts", - "spec/**/*.ts" + "**/*.ts", + ".eslintrc.js", + "integration_test/**/*" ] } diff --git a/tsconfig.release.json b/tsconfig.release.json index b114d4697..33f995b0e 100644 --- a/tsconfig.release.json +++ b/tsconfig.release.json @@ -1,18 +1,21 @@ { + // This config is used by `tsc` to generate .d.ts files only. + // tsdown handles CJS/ESM transpilation but has dts generation issues, + // so we use tsc for type declarations. See tsdown.config.mts for details. 
"compilerOptions": { "declaration": true, - "lib": ["es6"], + "emitDeclarationOnly": true, + "lib": ["es2022"], "module": "commonjs", "noImplicitAny": false, + "noUnusedLocals": true, "outDir": "lib", "stripInternal": true, - "target": "es6", - "typeRoots": [ - "node_modules/@types" - ] + "target": "es2022", + "useDefineForClassFields": false, + "esModuleInterop": true, + "typeRoots": ["./node_modules/@types"] }, - "files": [ - "src/index.ts", - "src/testing.ts" - ] + "include": ["src/**/*.ts"], + "exclude": ["src/**/*.spec.ts"] } diff --git a/tsdown.config.mts b/tsdown.config.mts new file mode 100644 index 000000000..3f40420bb --- /dev/null +++ b/tsdown.config.mts @@ -0,0 +1,37 @@ +import { defineConfig } from "tsdown"; + +const rewriteProtoPathMjs = { + name: "rewrite-proto-path-mjs", + resolveId(source) { + if (source === "../../../protos/compiledFirestore") { + return { id: "../../../../protos/compiledFirestore.mjs", external: true }; + } + return null; + }, +}; + +// Note: We use tsc (via tsconfig.release.json) for .d.ts generation instead of tsdown's +// built-in dts option due to issues with rolldown-plugin-dts. +// See: https://github.com/sxzz/rolldown-plugin-dts/issues/121 +export default defineConfig([ + { + entry: "src/**/*.ts", + unbundle: true, + format: "cjs", + outDir: "lib", + clean: true, + dts: false, // Use tsc for type declarations + treeshake: false, + external: ["../../../protos/compiledFirestore"], + }, + { + entry: "src/**/*.ts", + unbundle: true, + format: "esm", + outDir: "lib/esm", + clean: false, // Don't clean - need to keep cjs/ output + dts: false, // Use tsc for type declarations + treeshake: false, + plugins: [rewriteProtoPathMjs], + }, +]); \ No newline at end of file diff --git a/tslint.json b/tslint.json index 72dce24d8..2efe809c3 100644 --- a/tslint.json +++ b/tslint.json @@ -1,13 +1,20 @@ { - "extends": "tslint:recommended", + "defaultSeverity": "warning", + "extends": [ + "tslint:recommended", + "tslint-no-unused-expression-chai", + "tslint-config-prettier" + ], "rules": { + "interface-name": false, + "member-access": false, + "no-namespace": false, + "no-console": false, + "object-literal-key-quotes": [true, "as-needed"], + "object-literal-sort-keys": false, "quotemark": [true, "single", "avoid-escape"], - "interface-name": [false], + "trailing-comma": [true, { "functions": "never" }], "variable-name": [true, "check-format", "allow-leading-underscore"], - "object-literal-sort-keys": false, - "whitespace": [true], - "member-access": [false], - "no-console": [false], - "no-namespace": [false] + "whitespace": true } } diff --git a/upgrade-warning b/upgrade-warning deleted file mode 100644 index 9f2ecc220..000000000 --- a/upgrade-warning +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env node - -'use strict'; - -const message = ` -======== WARNING! ======== - -This upgrade of firebase-functions contains breaking changes if you are upgrading from a version below v1.0.0. 
- -To see a complete list of these breaking changes, please go to: - -https://firebase.google.com/docs/functions/beta-v1-diff -`; - -console.log(message); diff --git a/v1/analytics.js b/v1/analytics.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/analytics.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/auth.js b/v1/auth.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/auth.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/database.js b/v1/database.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/database.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/firestore.js b/v1/firestore.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/firestore.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/index.js b/v1/index.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/index.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/pubsub.js b/v1/pubsub.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/pubsub.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/remoteConfig.js b/v1/remoteConfig.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/remoteConfig.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/storage.js b/v1/storage.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/storage.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/tasks.js b/v1/tasks.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v1/tasks.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v1/testLab.js b/v1/testLab.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v1/testLab.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/alerts/appDistribution.js b/v2/alerts/appDistribution.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/alerts/appDistribution.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/alerts/billing.js b/v2/alerts/billing.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/alerts/billing.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/alerts/crashlytics.js b/v2/alerts/crashlytics.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/alerts/crashlytics.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/alerts/index.js b/v2/alerts/index.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/alerts/index.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/alerts/performance.js b/v2/alerts/performance.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v2/alerts/performance.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/core.js b/v2/core.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/core.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/database.js b/v2/database.js new file mode 100644 index 000000000..c822b56f1 --- /dev/null +++ b/v2/database.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 \ No newline at end of file diff --git a/v2/dataconnect.js b/v2/dataconnect.js new file mode 100644 index 000000000..3e4b26904 --- /dev/null +++ b/v2/dataconnect.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2025 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/eventarc.js b/v2/eventarc.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v2/eventarc.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/firestore.js b/v2/firestore.js new file mode 100644 index 000000000..515ddd3b4 --- /dev/null +++ b/v2/firestore.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2023 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/https.js b/v2/https.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/https.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/src/testing.ts b/v2/identity.js similarity index 81% rename from src/testing.ts rename to v2/identity.js index b1b09e8e8..ac9a09d74 100644 --- a/src/testing.ts +++ b/v2/identity.js @@ -1,6 +1,6 @@ // The MIT License (MIT) // -// Copyright (c) 2017 Firebase +// Copyright (c) 2022 Firebase // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -19,11 +19,3 @@ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. - -// This file is an entry point into the testing-only functionality of the -// Firebase Functions SDK. - -// TODO(rjh): provide actual testing functionality. -export function whereAreTheBugs(): string { - return 'Klendathu'; -} diff --git a/v2/index.js b/v2/index.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/index.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/options.js b/v2/options.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/options.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/params.js b/v2/params.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/params.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/pubsub.js b/v2/pubsub.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/pubsub.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/remoteConfig.js b/v2/remoteConfig.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v2/remoteConfig.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. 
It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/scheduler.js b/v2/scheduler.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v2/scheduler.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/storage.js b/v2/storage.js new file mode 100644 index 000000000..7d725acc3 --- /dev/null +++ b/v2/storage.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2021 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. 
+// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/tasks.js b/v2/tasks.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v2/tasks.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810 diff --git a/v2/testLab.js b/v2/testLab.js new file mode 100644 index 000000000..ae33ba821 --- /dev/null +++ b/v2/testLab.js @@ -0,0 +1,26 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 Firebase +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// This file is not part of the firebase-functions SDK. It is used to silence the +// imports eslint plugin until it can understand import paths defined by node +// package exports. +// For more information, see github.com/import-js/eslint-plugin-import/issues/1810
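All of the v2/*.js stubs added above serve the same purpose: they put a physical file behind each "firebase-functions/v2/..." import path, because eslint-plugin-import resolves imports by looking for files on disk and does not yet understand subpaths published through the "exports" field of package.json (the issue linked in each stub). A minimal sketch of the lint failure the stubs prevent, assuming a consumer project linted with eslint-plugin-import; the import path and the rule name are real, while the function body and the quoted "exports" mapping are illustrative only:

// functions/src/index.ts in a hypothetical consumer project.
// Node resolves "firebase-functions/v2/https" through the SDK's package.json
// "exports" map, roughly of the shape (illustrative, not the exact manifest):
//   "exports": { "./v2/https": "./lib/v2/providers/https.js", ... }
// eslint-plugin-import instead looks for node_modules/firebase-functions/v2/https
// on disk, so without the stub v2/https.js it reports import/no-unresolved here.
import { onRequest } from "firebase-functions/v2/https";

// A trivial HTTPS function, only to keep the example self-contained.
export const ping = onRequest((req, res) => {
  res.status(200).send("pong");
});

With the stub files in place, the path exists on disk, the rule passes, and Node's own resolution through "exports" continues to pick up the real build output rather than the stubs.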