diff --git a/.azure-pipelines/azure-pipelines-data-container.yml b/.azure-pipelines/azure-pipelines-data-container.yml index 5f5a925d769d..1b27baf4137f 100644 --- a/.azure-pipelines/azure-pipelines-data-container.yml +++ b/.azure-pipelines/azure-pipelines-data-container.yml @@ -8,7 +8,7 @@ schedules: pool: name: azsdk-pool - demands: ImageOverride -equals ubuntu-22.04 + demands: ImageOverride -equals ubuntu-24.04 variables: IMAGE_TAG_PREFIX: $[format('1.0.{0:HHm}', pipeline.startTime)] diff --git a/.config/1espt/PipelineAutobaseliningConfig.yml b/.config/1espt/PipelineAutobaseliningConfig.yml new file mode 100644 index 000000000000..8306056c1268 --- /dev/null +++ b/.config/1espt/PipelineAutobaseliningConfig.yml @@ -0,0 +1,10 @@ +## DO NOT MODIFY THIS FILE MANUALLY. This is part of auto-baselining from 1ES Pipeline Templates. Go to [https://aka.ms/1espt-autobaselining] for more details. + +pipelines: + 7721: + retail: + source: + credscan: + lastModifiedDate: 2025-05-16 + policheck: + lastModifiedDate: 2025-05-16 diff --git a/.config/guardian/.gdnbaselines b/.config/guardian/.gdnbaselines new file mode 100644 index 000000000000..f0d5821b4ef1 --- /dev/null +++ b/.config/guardian/.gdnbaselines @@ -0,0 +1,141 @@ +{ + "properties": { + "helpUri": "https://eng.ms/docs/microsoft-security/security/azure-security/cloudai-security-fundamentals-engineering/security-integration/guardian-wiki/microsoft-guardian/general/baselines" + }, + "version": "1.0.0", + "baselines": { + "default": { + "name": "default", + "createdDate": "2025-05-16 21:01:03Z", + "lastUpdatedDate": "2025-05-16 21:01:03Z" + } + }, + "results": { + "66f678ca5f24b175fc27c406310611d1b24f7c6e93015464a6ca10784576b6ed": { + "signature": "66f678ca5f24b175fc27c406310611d1b24f7c6e93015464a6ca10784576b6ed", + "alternativeSignatures": [], + "target": "specification/cognitiveservices/data-plane/TranslatorText/stable/v3.0/TranslatorText.json", + "line": 12, + "memberOf": [ + "default" + ], + "tool": "credscan", + 
"ruleId": "CSCAN-GENERAL0060", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "14da33d334bd6dae4e70b78bba2d59d6c7e902c33fc90377a718b404f8fb6151": { + "signature": "14da33d334bd6dae4e70b78bba2d59d6c7e902c33fc90377a718b404f8fb6151", + "alternativeSignatures": [], + "target": "specification/keyvault/data-plane/Microsoft.KeyVault/preview/7.6-preview.1/keys.json", + "line": 1649, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0060", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "47a19db622c1fc4c19a3b2434c5e077931cd0d0cfa6b1fbbdbde075bda9481b7": { + "signature": "47a19db622c1fc4c19a3b2434c5e077931cd0d0cfa6b1fbbdbde075bda9481b7", + "alternativeSignatures": [], + "target": "specification/keyvault/data-plane/Microsoft.KeyVault/preview/7.6-preview.2/keys.json", + "line": 1696, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0060", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "a471bf10714f13c5a04e572b21784278608532b1df6b9088aff1b99be1329563": { + "signature": "a471bf10714f13c5a04e572b21784278608532b1df6b9088aff1b99be1329563", + "alternativeSignatures": [], + "target": "specification/keyvault/data-plane/Microsoft.KeyVault/stable/7.5/keys.json", + "line": 1647, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0060", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + 
"7919cd03b89028fefa170f4a5d6f750fc23470a94e52cbe2a84e2be9b9c7dd54": { + "signature": "7919cd03b89028fefa170f4a5d6f750fc23470a94e52cbe2a84e2be9b9c7dd54", + "alternativeSignatures": [], + "target": "specification/migrateprojects/resource-manager/Microsoft.Migrate/preview/2018-09-01-preview/examples/Solutions_GetConfig.json", + "line": 12, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-AZURE0030", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "f21309c647e842c4ed9beff0e349d27e295b9475cce3b60fba6757e224e74caa": { + "signature": "f21309c647e842c4ed9beff0e349d27e295b9475cce3b60fba6757e224e74caa", + "alternativeSignatures": [], + "target": "specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2017-04-01-preview/examples/Output_Create_DataWarehouse.json", + "line": 16, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0030", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "56768d8b0027f845342354b5f75b7a2b05a44e072adf8c5d4521ff091e984861": { + "signature": "56768d8b0027f845342354b5f75b7a2b05a44e072adf8c5d4521ff091e984861", + "alternativeSignatures": [], + "target": "specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2017-04-01-preview/examples/Output_Create_DataWarehouse.json", + "line": 61, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0030", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "2485ac4150bf9f1e1d6eb90e52fbb3fe890539a83313f20e63a60f3f1157096a": { + "signature": 
"2485ac4150bf9f1e1d6eb90e52fbb3fe890539a83313f20e63a60f3f1157096a", + "alternativeSignatures": [], + "target": "specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_DataWarehouse.json", + "line": 16, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0030", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + }, + "4c6af74e1956243e47cf19e6abd3de23fcbc8cdaee10b6a4752a43e8c3a0e72f": { + "signature": "4c6af74e1956243e47cf19e6abd3de23fcbc8cdaee10b6a4752a43e8c3a0e72f", + "alternativeSignatures": [], + "target": "specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_DataWarehouse.json", + "line": 16, + "memberOf": [ + "default" + ], + "tool": "credscan", + "ruleId": "CSCAN-GENERAL0030", + "createdDate": "2025-05-16 21:01:03Z", + "expirationDate": "2025-11-02 21:02:05Z", + "justification": "This error is baselined with an expiration date of 180 days from 2025-05-16 21:02:05Z" + } + } +} \ No newline at end of file diff --git a/.github/.prettierignore b/.github/.prettierignore new file mode 100644 index 000000000000..56c22b34130f --- /dev/null +++ b/.github/.prettierignore @@ -0,0 +1,16 @@ +# this file +.prettierignore + +# code coverage +coverage + +# specs in test folders +fixtures +specification + +# unmanaged files and folders +copilot-instructions.md +ISSUE_TEMPLATE +policies +prompts +PULL_REQUEST_TEMPLATE diff --git a/.github/.prettierrc.yaml b/.github/.prettierrc.yaml new file mode 100644 index 000000000000..6fa04cdc80ad --- /dev/null +++ b/.github/.prettierrc.yaml @@ -0,0 +1,12 @@ +# Keep in sync with eng/tools/.prettierrc.yaml + +# Aligned with microsoft/typespec +printWidth: 100 + +overrides: + # tsconfig.json is actually parsed as JSONC + - files: + - tsconfig.json 
+ - tsconfig.*.json + options: + parser: jsonc diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index cd4715ac3318..cc61e016a1b3 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,16 +8,12 @@ ######### # Codeowner assignments are made from the _last_ matching entry in CODEOWNERS, so catch-all entries must come first ######### -/specification/*/data-plane/ @Azure/api-stewardship-board # PRLabel: %Schema Registry /specification/schemaregistry/ @hmlam @nickghardwick @lmazuel @deyaaeldeen @JoshLove-msft @swathipil @conniey @minhanh-phan # PRLabel: %Cognitive Services -/dev/cognitiveservices/data-plane/Language/ @assafi @rokulka @ChongTang @annatisch @heaths @deyaaeldeen @kristapratico @mssfang @joseharriaga @minhanh-phan @Azure/api-stewardship-board - -# PRLabel: %FarmBeats -/specification/agrifood/data-plane @Azure/api-stewardship-board +/dev/cognitiveservices/data-plane/Language/ @assafi @rokulka @ChongTang @annatisch @heaths @deyaaeldeen @kristapratico @mssfang @joseharriaga @minhanh-phan # PRLabel: %Analysis Services /specification/analysisservices/ @taiwu @@ -32,14 +28,11 @@ /specification/applicationinsights/ @alexeldeib # PRLabel: %Monitor - Exporter -/specification/applicationinsights/data-plane/Monitor.Exporters/ @ramthi @trask @hectorhdzg @lzchen @Azure/api-stewardship-board +/specification/applicationinsights/data-plane/Monitor.Exporters/ @ramthi @trask @hectorhdzg @lzchen # PRLabel: %Container Apps /specification/app/ @jijohn14 @Juliehzl -# PRLabel: %Code Signing -/specification/codesigning/data-plane @Azure/api-stewardship-board - /specification/asazure/ @athipp # PRLabel: %Authorization @@ -60,16 +53,16 @@ /specification/billing/ @wilcobmsft @asarkar84 # PRLabel: %Network - CDN -/specification/cdn/ @jorinmejia @yunhemsft @jessicl-ms @rrahulms @ChenglongLiu @Ptnan7 +/specification/cdn/ @yunhemsft @jessicl-ms @rrahulms @ChenglongLiu @Ptnan7 @zhuofudeng # PRLabel: %Cognitive Services -/specification/cognitiveservices/ @felixwa @yangyuan 
+/specification/cognitiveservices/ @rkuthala @MattGal # PRLabel: %Cognitive Services - Form Recognizer /specification/cognitiveservices/data-plane/FormRecognizer/ @bojunehsu @nizi1127 @johanste # PRLabel: %Cognitive - Language -/specification/cognitiveservices/data-plane/Language/ @assafi @rokulka @ChongTang @annatisch @heaths @deyaaeldeen @kristapratico @mssfang @joseharriaga @minhanh-phan @Azure/api-stewardship-board +/specification/cognitiveservices/data-plane/Language/ @assafi @rokulka @ChongTang @annatisch @heaths @deyaaeldeen @kristapratico @mssfang @joseharriaga @minhanh-phan # PRLabel: %Compute /specification/compute/ @bilaakpan-ms @sandido @dkulkarni-ms @haagha @MS-syh2qs @grizzlytheodore @mabhard @danielli90 @smotwani @ppatwa @vikramd-ms @yunusm @ZhidongPeng @nkuchta @maheshnemichand @najams @changov @@ -98,7 +91,7 @@ /specification/customer-insights/ @tjlvtao # PRLabel: %Data Factory -/specification/datafactory/ @Frey-Wang @zhangyd2015 @davidzhaoyue +/specification/datafactory/ @Frey-Wang @ruowan @davidzhaoyue # PRLabel: %Data Lake Analytics /specification/datalake-analytics/ @ro-joowan @@ -107,22 +100,20 @@ /specification/datalake-store/ @ro-joowan # PRLabel: %Data Migration -/specification/datamigration/ @hitenjava +/specification/datamigration/ @hitenjava @gansach @amarjeetkr /specification/deploymentmanager/ @netrock # PRLabel: %Device Registry /specification/deviceregistry/ @marcodalessandro @rohankhandelwal @riteshrao -# PRLabel: %Device Update -/specification/deviceupdate/data-plane/ @Azure/api-stewardship-board - /specification/documentdb/ @dmakwana /specification/domainservices/ @jihochang # PRLabel: %Event Grid -/specification/eventgrid/ @lmazuel @jsquire @JoshLove-msft @l0lawrence +/specification/eventgrid/resource-manager/ @shankarsama @Kishp01 @a-hamad +/specification/eventgrid/ @Kishp01 @shankarsama @rajeshka # PRLabel: %Event Hubs /specification/eventhub/ @v-ajnava @dsouzaarun @damodaravadhani @@ -151,9 +142,6 @@ # PRLabel: %KeyVault 
/specification/keyvault/data-plane/ @vickm @chen-karen @cheathamb36 @lgonsoulin @heaths -# PRLabel: %Load Test Service -/specification/loadtestservice/data-plane/ @Azure/api-stewardship-board - # PRLabel: %Logic App /specification/logic/ @pankajsn @tonytang-microsoft-com @@ -180,6 +168,9 @@ # PRLabel: %Monitor - Operational Insights /specification/operationalinsights/ @sw47 +# PRLabel: %Online Experimentation +/specification/onlineexperimentation/ @Azure/azure-sdk-write-onlineexperimentation + # PRLabel: %Operations Management /specification/operationsmanagement/ @dashimi16 @@ -191,9 +182,6 @@ /specification/powerbidedicated/ @tarostok -# PRLabel: %Purview -/specification/purview/data-plane @Azure/api-stewardship-board - # PRLabel: %PostgreSQL /specification/postgresql/** @Azure/azure-sdk-write-postgresql @@ -220,10 +208,10 @@ /specification/scheduler/ @pinwang81 # PRLabel: %Search -/specification/search/data-plane/ @arv100kri @bleroy @Azure/api-stewardship-board @BevLoh @giulianob +/specification/search/data-plane/ @arv100kri @bleroy @BevLoh @giulianob # PRLabel: %Search -/specification/search/resource-manager/ @tjacobhi @conor-joplin @BevLoh +/specification/search/resource-manager/ @efrainretana @conor-joplin @BevLoh @xiong-qiao @jonathanserbent @Draconicida @kuanlu95 @admayber /specification/serialconsole/ @amitchat @craigw @asinn826 @@ -241,7 +229,7 @@ # PRLabel: %Storage /specification/storage/resource-manager/ @blueww @yifanz7 -/specification/storage/data-plane/ @seanmcc-msft @Azure/api-stewardship-board +/specification/storage/data-plane/ @seanmcc-msft # PRLabel: %Import Export /specification/storageimportexport/ @leoz-ms @@ -279,12 +267,6 @@ # PRLabel: %Profile /profile/ @shahabhijeet -/specification/**/resource-manager/**/readme.typescript.md @qiaozha -/specification/**/resource-manager/**/readme.az.md @jsntcy @qiaozha -/specification/**/resource-manager/**/readme.cli.md @jsntcy @qiaozha -/specification/**/resource-manager/**/readme.go.md @tadelesh 
-/specification/**/resource-manager/**/readme.python.md @msyyc - /specification/contosowidgetmanager/ @mikeharder @raych1 @maririos ########### @@ -298,7 +280,16 @@ /tsconfig.json @weshaggard @mikeharder /.azure-pipelines/ @weshaggard @mikeharder @benbp /.github/ @weshaggard @mikeharder @benbp +/.vscode/ @weshaggard @mikeharder @benbp +/dev/ @weshaggard @mikeharder @benbp /eng/ @weshaggard @mikeharder @benbp /eng/common/ @Azure/azure-sdk-eng +/eng/tools/typespec-migration-validation @pshao25 @live1206 +/eng/tools/typespec-validation/src/rules/sdk-tspconfig-validation.ts @wanlwanl @raych1 @maririos /scripts/ @weshaggard @mikeharder +/specification/suppressions.yaml @weshaggard @mikeharder @benbp @raych1 @wanlwanl @maririos /.github/CODEOWNERS @Azure/azure-sdk-eng + +## Copilot +/.github/copilot-instructions.md @praveenkuttappan @maririos +/.github/prompts/ @praveenkuttappan @maririos diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 6b9485377cd9..e435be2e0d76 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -1,3 +1,3 @@ -### *This is an outdated document. Please refer the new [wiki](https://github.com/Azure/azure-rest-api-specs/wiki) for up to date details.* -([`Link your GitHub account`](https://repos.opensource.microsoft.com/) to the 'Azure' organization for access.) +### _This is an outdated document. Please refer the new [wiki](https://github.com/Azure/azure-rest-api-specs/wiki) for up to date details._ +([`Link your GitHub account`](https://repos.opensource.microsoft.com/) to the 'Azure' organization for access.) 
diff --git a/.github/PULL_REQUEST_TEMPLATE/control_plane_template.md b/.github/PULL_REQUEST_TEMPLATE/control_plane_template.md index 1bcbe104d811..66fb2119dcf8 100644 --- a/.github/PULL_REQUEST_TEMPLATE/control_plane_template.md +++ b/.github/PULL_REQUEST_TEMPLATE/control_plane_template.md @@ -31,6 +31,7 @@ and followed the instructions by checking all the boxes: [ARM resource provider contract](https://aka.ms/azurerpc) and [REST guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md) (estimated time: 4 hours). I understand this is required before I can proceed to the diagram Step 2, "ARM API changes review", for this PR. +- [ ] A [release plan](https://aka.ms/azsdkdocs/release-plans) has been created. If not, please create one as it will help guide you through the REST API and SDK creation process. ## Additional information @@ -60,3 +61,4 @@ If one or multiple validation error/warning suppression(s) is detected in your P - If the PR CI checks appear to be stuck in `queued` state, please add a comment with contents `/azp run`. This should result in a new comment denoting a `PR validation pipeline` has started and the checks should be updated after few minutes. - If the help provided by the previous points is not enough, post to https://aka.ms/azsdk/support/specreview-channel and link to this PR. +- For guidance on SDK breaking change review, refer to https://aka.ms/ci-fix. diff --git a/.github/PULL_REQUEST_TEMPLATE/data_plane_template.md b/.github/PULL_REQUEST_TEMPLATE/data_plane_template.md index d4f16039081f..0e2d02a58ec8 100644 --- a/.github/PULL_REQUEST_TEMPLATE/data_plane_template.md +++ b/.github/PULL_REQUEST_TEMPLATE/data_plane_template.md @@ -42,6 +42,10 @@ If one or multiple validation error/warning suppression(s) is detected in your P [Swagger-Suppression-Process](https://aka.ms/pr-suppressions) to get approval. 
+### Release planner + +A [release plan](https://aka.ms/azsdkdocs/release-plans) should have been created. If not, please create one as it will help guide you through the REST API and SDK creation process. + ## ❔Got questions? Need additional info?? We are here to help!
diff --git a/.github/actions/setup-node-install-deps/action.yaml b/.github/actions/setup-node-install-deps/action.yaml index c743ae002085..a9e6a548d9bb 100644 --- a/.github/actions/setup-node-install-deps/action.yaml +++ b/.github/actions/setup-node-install-deps/action.yaml @@ -3,14 +3,14 @@ description: Uses specified Node version and installs dependencies (typically us inputs: node-version: - description: 'Node version to use' + description: "Node version to use" default: 22.x install-command: - description: 'Command to install dependencies' - default: 'npm ci' + description: "Command to install dependencies" + default: "npm ci" working-directory: - description: 'Working directory' - default: '.' + description: "Working directory" + default: "." runs: using: "composite" diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 000000000000..5eea4332f74d --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,221 @@ +## Converting a specification from swagger to typespec + +Users can convert a specification from swagger to typespec by using `tsp-client` a CLI designed to help developers throughout various stages of typespec development. + +### Instructions for converting a specification from swagger to typespec + +1. Install the dependencies specified in the package.json at the root of this repository. Command: + +``` +npm ci +``` + +2. `tsp-client` is installed as part of the dependencies specified at the root of this repository. To convert a swagger to typespec, run the following command: `npx tsp-client convert --swagger-readme ` +3. Now that you have a newly converted typespec project, you should go through all files to verify the accuracy of the converted spec when compared to the original swagger definitions. +4. 
For both data plane and management plane specifications, you should update the implementation according to the information provided under the [Initial migration checklist](#initial-migration-checklist) section. + +### Initial migration checklist + +The swagger converter will not be able to accurately represent every part of every API in TypeSpec. This document outlines some common changes you may need to make to a converted TypeSpec to make it conform to your existing service API, pass validation checks, and follow best practices. + +- Avoid extensive refactoring of the converted spec. The goal is to get a working spec that can compile successfully and then iteratively improve it. + +- DO ensure your `@service` and `@server` definitions are correct in main.tsp +- DO use the built-in [url][url-type] for endpoint specification. Example: + +```tsp +@server( + "{endpoint}/widget", + "Contoso Widget APIs", + { + /** + * Supported Widget Services endpoints (protocol and hostname, for example: + * https://westus.api.widget.contoso.com). + */ + endpoint: url, + } +) +``` + +- DO ensure that you have a security definition (`@useAuth`) specified for your service. See: [Security definitions in TypeSpec][security-definitions]. The @useAuth decorator should only be defined ONCE in the entire specification above the @server definition. +- AVOID adding new namespaces. +- Make sure the versions enum is declared under the existing namespace defined in main.tsp. Avoid adding it anywhere else. Ensure the `@versioned` decorator is specified over the namespace in main.tsp. Pass the versions enum to the `@versioned` decorator. 
Example of a typical structure for versioning: + +```tsp +// this is the main.tsp file + + +@versioned(Versions) +namespace Contoso.WidgetManager; +/** Service api versions **/ +enum Versions { + /** The 2023-11-01 api version **/ + v2023_11_01: "2023-11-01", +} +``` + +- All models, enums, unions, and operations should be added under the main namespace declared in the project. +- Avoid having models, enums, unions, operations, and other types declared outside of a namespace. +- If any files are using any of the versioning decorators, such as `@added`, `@removed`, `@changedType`, make sure to import the `@typespec/versioning` library and add a using statement. Example: + +```tsp +import "@typespec/versioning"; + +using TypeSpec.Versioning; +``` + +- DO review all enum and union definitions and add documentation over each enum or union member. See: [Documentation in TypeSpec][docs]. Example of a properly documented enum: + +```tsp +/** The color of a widget. */ +union WidgetColor { + string, + + /** Red Widget Color */ + Red: "Red", + + /** Green Widget Color */ + Green: "Green", + + /** Blue Widget Color */ + Blue: "Blue", +} +``` + +- DO ensure that all models, properties, operations, parameters, enums, unions, and alias definitions have documentation over them. TypeSpec convention recommends using the doc comment format `/** */` to add documentation, example: + +```tsp +/** The color of a widget. */ +model Widget { + /** Widget name */ + name: string +} +``` + +- DO define your visibility decorators with the appropriate value from the Lifecycle class. +- Avoid suppressing warnings +- Operation names should be camel case +- DO use `union` instead of `enum` to define Azure enums. For more information about how to define enums for Azure services see the following documentation: [Defining enums for Azure services][no-enum]. 
+- DO make client customizations in a `client.tsp` file +- Avoid importing or using `@azure-tools/typespec-client-generator-core` in other files aside from client.tsp. +- DO run `tsp compile .` on your specification and make one attempt to address all warnings. Do not attempt to address warnings more than once even if they aren't resolved. +- Attempt to address any FIXME or TODO comments in the spec. If you are unable to address them, leave them untouched + +#### Additional considerations + +- DO ensure you pull in the latest `main` from the Azure/azure-rest-api-specs repo to stay up to date with latest dependencies +- DO run `npm ci` to get a clean install of the package.json dependencies +- Avoid modifying the package.json or package-lock.json files at the root of the azure-rest-api-specs repo +- Avoid adding your own package.json or package-lock.json files in your project directory +- Avoid adding multiple tspconfig.yaml files for your service specification +- DO consult [ci-fix.md][ci-fix] for fixes to common CI errors reported + +## Troubleshooting tsp compile errors and warnings + +Examples of common errors and warnings that should be addressed after running the `tsp compile` command: + +- If you see an error with a message like: "referencing types from versioned namespace 'Azure.Core.Foundations' but didn't specify which versions with @useDependency", you should add the @useDependency decorator over each api version entry in your api versions enum. Example of a properly configured api versions enum: + +``` +/** Service api versions **/ +enum Versions { + /** The 2023-11-01 api version **/ + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + v2023_11_01: "2023-11-01", +} + +``` + +- If you see an invalid-ref or unknown identifier error you are most likely missing an import to the library that declares that decorator. 
To find supported libraries and decorators search through the documentation of the following sites: https://azure.github.io/typespec-azure/docs/intro/ and https://typespec.io/docs/ Search through the list of supported decorators, interfaces, and other types per library until you find the correct library to import and/or include a using statement in your typespec files. +- In order to address warnings raised by the @azure-tools/typespec-azure-core search through this page for relevant solutions to apply: https://azure.github.io/typespec-azure/docs/intro/ +- camelCase fixes only apply to the typespec property names, any corresponding string values you should left as is. +- String values in typespec files should be left untouched. + + + +[contoso-widget-manager]: ../specification/contosowidgetmanager/Contoso.WidgetManager/ +[tspconfig]: ../specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml +[security-definitions]: https://azure.github.io/typespec-azure/docs/reference/azure-style-guide#security-definitions +[versioning]: https://typespec.io/docs/libraries/versioning/guide#implementing-versioned-apis +[docs]: https://typespec.io/docs/language-basics/documentation +[ci-fix]: ../documentation/ci-fix.md +[url-type]: https://typespec.io/docs/language-basics/built-in-types#string-types +[no-enum]: https://azure.github.io/typespec-azure/docs/libraries/azure-core/rules/no-enum + + +# SDK generation from TypeSpec + +## Agent context for TypeSpec and SDK process +- Check all open files in the editor and check if `main.tsp` or `tspconfig.yaml` are open in the editor. If either of +these files are open, then use the parent path of the `main.tsp` or `tspconfig.yaml` as default TypeSpec project root +path. +- If `main.tsp` and `tspconfig.yaml` are not open in the editor, then check if there are any TypeSpec project paths in +the context. 
If there are no TypeSpec project paths in the context, then prompt user to select a TypeSpec project path +from the list of paths. If user does not have a TypeSpec project, then prompt user to create a new TypeSpec project. + + +### Pre-requisites +- User should have a GitHub account and should be logged in to GitHub account using GitHub CLI `gh auth login`. +- run `npm ci` to install the dependencies + + +### Basic Rules for SDK Generation from TypeSpec + +1. **User Guidance**: + - Assume the user is unfamiliar with the SDK release process. Provide clear, concise instructions for each step. + +2. **File Handling**: + - Do not overwrite `tspconfig.yaml` or `main.tsp`. Use existing files and suggest updates if necessary. + - Use the path of the `tspconfig.yaml` file already open in the editor or the `.tsp` file path as the project root. + - If no `.tsp` file or folder is in the current context, prompt the user to select a valid TypeSpec project root path. + +3. **Process Visibility**: + - Highlight all steps in the SDK generation process, showing completed and remaining steps. + - Do not skip any main steps. Ensure all steps are completed before moving to the next. + +4. **Git Operations**: + - Avoid using the `main` branch for pull requests. Prompt the user to create or switch to a new branch if necessary. + - Display git commands (e.g., `git checkout`, `git add`, `git commit`, `git push`) with a "Run" button instead of + asking the user to copy and paste. + - Do not run `git diff` + +5. **Azure-Specific Rules**: + - Always use `Azure` as the repo owner in MCP tool calls. + - Confirm with the user if they want to change the repo owner or target branch, and prompt for new values if needed. + +6. **Exclusions**: + - Exclude changes in `.github` and `.vscode` folders from API spec and SDK pull requests. + +7. 
**Working Branch Rule**: + - Ensure the TypeSpec project repository and the current working repository are not on the `main` branch: + - Check the current branch name for the cloned GitHub repository: + - If the current branch is `main`, prompt the user to create a new branch using + `git checkout -b `. + - If the current branch is not `main`, prompt the user to either select an existing branch or create a + new one. + - For branch switching: + - If a branch already exists and differs from the current branch, prompt the user to switch using + `git checkout `. + - GitHub pull requests cannot be created from the `main` branch. Ensure all changes are made on a non-`main` branch. + +By following these rules, the SDK release process will remain clear, structured, and user-friendly. + +## Steps to generate SDK from TypeSpec API specification +Follow `/typespec-to-sdk` prompt to generate and release SDK from TypeSpec API specification. The process is divided into several steps, each with specific actions to ensure a smooth SDK generation and release process. +Do not skip the step that choose SDK generation method to ensure the user selects the appropriate method for SDK generation, either locally or using the SDK generation pipeline. Do not repeat the steps. + +1. **Identify TypeSpec Project**: Locate the TypeSpec project root path by checking for `tspconfig.yaml` or `main.tsp` files. +2. **Validate TypeSpec Specification**: Ensure the TypeSpec specification compiles without errors. +3. **Verify Authentication and Repository Status**: Ensure user is authenticated and working in the correct public Azure repository. +4. **Review and Commit Changes**: Stage and commit TypeSpec modifications, ensuring the current branch is not "main". Do not create pull request yet. +5. **Create Specification Pull Request**: Create a pull request for TypeSpec changes if not already created. This is required only if there are TypeSpec changes in current branch. +6. 
**Choose SDK Generation Method**: Determine how to generate SDKs (locally or via pipeline). Only Python is supported for local SDK generation at this time. +7. **Generate SDKs via Pipeline**: Generate SDKs using `/run-sdk-gen-pipeline` prompt, monitor the pipeline status and displaying generated SDK PR links. +8. **Show generated SDK PR**: Display the generated SDK pull request links for review. +9. **Create a release plan**: Create a release plan for the generated SDKs using spec pull request. +10. **Prompt user to change spec pull request to ready for review from draft status**: Update spec pull request to change it to ready for review. +11. **Release package**: Release the SDK package using `ReleaseSdkPackage` tool. + + +## Release readiness of SDK and information about the release pipeline +Run `/check-package-readiness` prompt to check the release readiness of an SDK package. This prompt will collect the required information from the user, execute the readiness check, and present the results. 
\ No newline at end of file diff --git a/.github/cspell.yaml b/.github/cspell.yaml index 9fcc52014a45..bdb132159785 100644 --- a/.github/cspell.yaml +++ b/.github/cspell.yaml @@ -1,4 +1,4 @@ -version: '0.2' +version: "0.2" import: - ../cspell.yaml words: diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 4aa60704668e..79483d9de8b0 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -16,11 +16,12 @@ updates: - dependency-name: "typescript" # Updated manually by the Liftr team - dependency-name: "@azure-tools/typespec-liftr-base" - # Updated manually by owner of eng/tools/lint-diff - - dependency-name: "@apidevtools/json-schema-ref-parser" - dependency-name: "@types/js-yaml" - dependency-name: "autorest" - dependency-name: "js-yaml" + # Only allow patch updates for spec-gen-sdk + - dependency-name: "@azure-tools/spec-gen-sdk" + update-types: ["version-update:semver-minor", "version-update:semver-major"] groups: # Ships separately from other typespec packages typespec-client-generator-cli: @@ -40,7 +41,8 @@ updates: versioning-strategy: increase-if-necessary - package-ecosystem: "npm" directories: - - "/.github/" + - "/.github" + - "/.github/shared" schedule: interval: "daily" ignore: diff --git a/.github/eslint.config.js b/.github/eslint.config.js index 366b15cfe396..d3c33d8d947d 100644 --- a/.github/eslint.config.js +++ b/.github/eslint.config.js @@ -2,7 +2,4 @@ import pluginJs from "@eslint/js"; import globals from "globals"; /** @type {import('eslint').Linter.Config[]} */ -export default [ - { languageOptions: { globals: globals.node } }, - pluginJs.configs.recommended, -]; +export default [{ languageOptions: { globals: globals.node } }, pluginJs.configs.recommended]; diff --git a/.github/instructions/dp-migration.instructions.md b/.github/instructions/dp-migration.instructions.md new file mode 100644 index 000000000000..9b8673b69b45 --- /dev/null +++ b/.github/instructions/dp-migration.instructions.md @@ -0,0 +1,12 @@ +- DO extend 
the `@azure-tools/typespec-azure-rulesets/data-plane` linter rule set in your tspconfig.yaml. Example: + +```yaml title=tspconfig.yaml +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/data-plane" +``` + +- A good example of a well structured data plane spec is the ../../specification/contosowidgetmanager/Contoso.WidgetManager/ spec. Use this as a reference for your own spec. +- Avoid importing or using templates from the `@azure-tools/typespec-azure-resource-manager` library in a data-plane specification + +I've recently converted my swagger specification to typespec, can you go through the files of my project and make sure they're following the initial migration checklist? diff --git a/.github/instructions/mgmt-migration.instructions.md b/.github/instructions/mgmt-migration.instructions.md new file mode 100644 index 000000000000..ab5918b4d405 --- /dev/null +++ b/.github/instructions/mgmt-migration.instructions.md @@ -0,0 +1,11 @@ +- DO extend the `@azure-tools/typespec-azure-rulesets/resource-manager` linter rule set in your tspconfig.yaml. Example: + +```yaml title=tspconfig.yaml +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/resource-manager" +``` + +- A good example of a well structured management plane spec is the ../../specification/contosowidgetmanager/Contoso.Management/ spec. Use this as a reference for your own spec. + +I've recently converted my swagger specification to typespec, can you go through the files of my project and make sure they're following the initial migration checklist? 
diff --git a/.github/package-lock.json b/.github/package-lock.json index 6b29afcbb949..219682f63506 100644 --- a/.github/package-lock.json +++ b/.github/package-lock.json @@ -5,21 +5,26 @@ "packages": { "": { "dependencies": { - "@apidevtools/json-schema-ref-parser": "^11.9.3", + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "debug": "^4.4.0", "js-yaml": "^4.1.0", - "marked": "^15.0.7" + "marked": "^16.0.0", + "simple-git": "^3.27.0" }, "devDependencies": { "@eslint/js": "^9.22.0", "@octokit/webhooks-types": "^7.5.1", "@tsconfig/node20": "^20.1.4", + "@types/debug": "^4.1.12", "@types/github-script": "github:actions/github-script", "@types/js-yaml": "^4.0.9", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "cross-env": "^7.0.3", "eslint": "^9.22.0", "globals": "^16.0.0", "prettier": "~3.5.3", + "semver": "^7.7.1", "typescript": "~5.8.2", "vitest": "^3.0.7" } @@ -46,16 +51,19 @@ } }, "node_modules/@actions/github": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", - "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.1.tgz", + "integrity": "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==", "dev": true, "license": "MIT", "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", - "@octokit/plugin-paginate-rest": "^9.0.0", - "@octokit/plugin-rest-endpoint-methods": "^10.0.0" + "@octokit/plugin-paginate-rest": "^9.2.2", + "@octokit/plugin-rest-endpoint-methods": "^10.4.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "undici": "^5.28.5" } }, "node_modules/@actions/glob": { @@ -102,26 +110,24 @@ } }, "node_modules/@apidevtools/json-schema-ref-parser": { - "version": "11.9.3", - "resolved": 
"https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.9.3.tgz", - "integrity": "sha512-60vepv88RwcJtSHrD6MjIL6Ta3SOYbgfnkHb+ppAVK+o9mXprRtulx7VlRl3lN3bbvysAfCS7WMVfhUYemB0IQ==", - "license": "MIT", + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-14.0.3.tgz", + "integrity": "sha512-XtI3vr6mq5ySDV7j+/ya7m9UDkRYN91NeSM5CBjGE8EZHXTuu5duHMm5emG+X8tmjRCYpEkWpHfxHpVR91owVg==", "dependencies": { - "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", "js-yaml": "^4.1.0" }, "engines": { - "node": ">= 16" + "node": ">= 20" }, "funding": { "url": "https://github.com/sponsors/philsturgeon" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, "license": "MIT", "engines": { @@ -129,9 +135,9 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "dev": true, "license": "MIT", "engines": { @@ -139,13 +145,13 @@ } }, "node_modules/@babel/parser": { 
- "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", - "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.7.tgz", + "integrity": "sha512-qnzXzDXdr/po3bOTbTIQZ7+TxNKxpkN5IifVLXS+r7qwynkZfPyjZfE7hCXbo7IoO9TNcSyibgONsf2HauUd3Q==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.27.0" + "@babel/types": "^7.27.7" }, "bin": { "parser": "bin/babel-parser.js" @@ -155,14 +161,14 @@ } }, "node_modules/@babel/types": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", - "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.7.tgz", + "integrity": "sha512-8OLQgDScAOHXnAz2cV+RfzzNMipuLVBz2biuAJFMV9bfkNf393je3VM8CLkjQodW5+iWsSJdSgSWT6rsZoXHPw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -179,9 +185,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", - "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz", + "integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==", "cpu": [ "ppc64" ], @@ -196,9 +202,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", - "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.5.tgz", + "integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==", "cpu": [ "arm" ], @@ -213,9 +219,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", - "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz", + "integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==", "cpu": [ "arm64" ], @@ -230,9 +236,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", - "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.5.tgz", + "integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==", "cpu": [ "x64" ], @@ -247,9 +253,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", - "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz", + "integrity": 
"sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==", "cpu": [ "arm64" ], @@ -264,9 +270,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", - "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz", + "integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==", "cpu": [ "x64" ], @@ -281,9 +287,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", - "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz", + "integrity": "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==", "cpu": [ "arm64" ], @@ -298,9 +304,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", - "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz", + "integrity": "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==", "cpu": [ "x64" ], @@ -315,9 +321,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", - "integrity": 
"sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz", + "integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==", "cpu": [ "arm" ], @@ -332,9 +338,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", - "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz", + "integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==", "cpu": [ "arm64" ], @@ -349,9 +355,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", - "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz", + "integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==", "cpu": [ "ia32" ], @@ -366,9 +372,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", - "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz", + "integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==", "cpu": [ "loong64" ], @@ -383,9 +389,9 @@ } }, 
"node_modules/@esbuild/linux-mips64el": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", - "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz", + "integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==", "cpu": [ "mips64el" ], @@ -400,9 +406,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", - "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz", + "integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==", "cpu": [ "ppc64" ], @@ -417,9 +423,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", - "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz", + "integrity": "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==", "cpu": [ "riscv64" ], @@ -434,9 +440,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", - "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", + "version": "0.25.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz", + "integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==", "cpu": [ "s390x" ], @@ -451,9 +457,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", - "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz", + "integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==", "cpu": [ "x64" ], @@ -468,9 +474,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", - "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz", + "integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==", "cpu": [ "arm64" ], @@ -485,9 +491,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", - "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz", + "integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==", "cpu": [ "x64" ], @@ -502,9 +508,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", - 
"integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz", + "integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==", "cpu": [ "arm64" ], @@ -519,9 +525,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", - "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz", + "integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==", "cpu": [ "x64" ], @@ -536,9 +542,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", - "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz", + "integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==", "cpu": [ "x64" ], @@ -553,9 +559,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", - "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz", + "integrity": "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==", "cpu": [ "arm64" ], @@ -570,9 +576,9 @@ } }, 
"node_modules/@esbuild/win32-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", - "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz", + "integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==", "cpu": [ "ia32" ], @@ -587,9 +593,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", - "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", + "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", "cpu": [ "x64" ], @@ -604,9 +610,9 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.1.tgz", - "integrity": "sha512-soEIOALTfTK6EjmKMMoLugwaP0rzkad90iIWd1hMO9ARkSAyjfMfkRRhLvD5qH7vvM0Cg72pieUfR6yh6XxC4w==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", "dev": true, "license": "MIT", "dependencies": { @@ -646,11 +652,10 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.2", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz", - "integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==", + "version": "0.21.0", + "resolved": 
"https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", "dev": true, - "license": "Apache-2.0", "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", @@ -661,19 +666,18 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.0.tgz", - "integrity": "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", + "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", - "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz", + "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -721,13 +725,15 @@ } }, "node_modules/@eslint/js": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.23.0.tgz", - "integrity": "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==", + "version": "9.30.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.30.1.tgz", + "integrity": "sha512-zXhuECFlyep42KZUhWjfvsmXGX39W8K8LFb8AWXM9gSV9dQB+MrJGLKvW6Zw0Ggnbpw0VHTtrhFXYe3Gym18jg==", "dev": true, - "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || 
>=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, "node_modules/@eslint/object-schema": { @@ -735,25 +741,37 @@ "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz", - "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", + "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.12.0", + "@eslint/core": "^0.15.1", "levn": "^0.4.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@fastify/busboy": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", @@ -817,9 +835,9 @@ } }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.2.tgz", - "integrity": "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==", + "version": "0.4.3", + 
"resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -911,10 +929,19 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@jsdevtools/ono": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz", - "integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==", + "node_modules/@kwsites/file-exists": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@kwsites/file-exists/-/file-exists-1.1.1.tgz", + "integrity": "sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==", + "license": "MIT", + "dependencies": { + "debug": "^4.1.1" + } + }, + "node_modules/@kwsites/promise-deferred": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz", + "integrity": "sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==", "license": "MIT" }, "node_modules/@octokit/auth-token": { @@ -1139,9 +1166,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz", - "integrity": "sha512-l7StVw6WAa8l3vA1ov80jyetOAEo1FtHvZDbzXDO/02Sq/QVvqlHkYoFwDJPIMj0GKiistsBudfx5tGFnwYWDQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.44.1.tgz", + "integrity": "sha512-JAcBr1+fgqx20m7Fwe1DxPUl/hPkee6jA6Pl7n1v2EFiktAHenTaXl5aIFjUIEsfn9w3HE4gK1lEgNGMzBDs1w==", "cpu": [ "arm" ], @@ -1153,9 +1180,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.37.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.37.0.tgz", - "integrity": "sha512-6U3SlVyMxezt8Y+/iEBcbp945uZjJwjZimu76xoG7tO1av9VO691z8PkhzQ85ith2I8R2RddEPeSfcbyPfD4hA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.44.1.tgz", + "integrity": "sha512-RurZetXqTu4p+G0ChbnkwBuAtwAbIwJkycw1n6GvlGlBuS4u5qlr5opix8cBAYFJgaY05TWtM+LaoFggUmbZEQ==", "cpu": [ "arm64" ], @@ -1167,9 +1194,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.37.0.tgz", - "integrity": "sha512-+iTQ5YHuGmPt10NTzEyMPbayiNTcOZDWsbxZYR1ZnmLnZxG17ivrPSWFO9j6GalY0+gV3Jtwrrs12DBscxnlYA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.44.1.tgz", + "integrity": "sha512-fM/xPesi7g2M7chk37LOnmnSTHLG/v2ggWqKj3CCA1rMA4mm5KVBT1fNoswbo1JhPuNNZrVwpTvlCVggv8A2zg==", "cpu": [ "arm64" ], @@ -1181,9 +1208,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.37.0.tgz", - "integrity": "sha512-m8W2UbxLDcmRKVjgl5J/k4B8d7qX2EcJve3Sut7YGrQoPtCIQGPH5AMzuFvYRWZi0FVS0zEY4c8uttPfX6bwYQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.44.1.tgz", + "integrity": "sha512-gDnWk57urJrkrHQ2WVx9TSVTH7lSlU7E3AFqiko+bgjlh78aJ88/3nycMax52VIVjIm3ObXnDL2H00e/xzoipw==", "cpu": [ "x64" ], @@ -1195,9 +1222,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.37.0.tgz", - "integrity": "sha512-FOMXGmH15OmtQWEt174v9P1JqqhlgYge/bUjIbiVD1nI1NeJ30HYT9SJlZMqdo1uQFyt9cz748F1BHghWaDnVA==", + "version": "4.44.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.44.1.tgz", + "integrity": "sha512-wnFQmJ/zPThM5zEGcnDcCJeYJgtSLjh1d//WuHzhf6zT3Md1BvvhJnWoy+HECKu2bMxaIcfWiu3bJgx6z4g2XA==", "cpu": [ "arm64" ], @@ -1209,9 +1236,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.37.0.tgz", - "integrity": "sha512-SZMxNttjPKvV14Hjck5t70xS3l63sbVwl98g3FlVVx2YIDmfUIy29jQrsw06ewEYQ8lQSuY9mpAPlmgRD2iSsA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.44.1.tgz", + "integrity": "sha512-uBmIxoJ4493YATvU2c0upGz87f99e3wop7TJgOA/bXMFd2SvKCI7xkxY/5k50bv7J6dw1SXT4MQBQSLn8Bb/Uw==", "cpu": [ "x64" ], @@ -1223,9 +1250,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.37.0.tgz", - "integrity": "sha512-hhAALKJPidCwZcj+g+iN+38SIOkhK2a9bqtJR+EtyxrKKSt1ynCBeqrQy31z0oWU6thRZzdx53hVgEbRkuI19w==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.44.1.tgz", + "integrity": "sha512-n0edDmSHlXFhrlmTK7XBuwKlG5MbS7yleS1cQ9nn4kIeW+dJH+ExqNgQ0RrFRew8Y+0V/x6C5IjsHrJmiHtkxQ==", "cpu": [ "arm" ], @@ -1237,9 +1264,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.37.0.tgz", - "integrity": "sha512-jUb/kmn/Gd8epbHKEqkRAxq5c2EwRt0DqhSGWjPFxLeFvldFdHQs/n8lQ9x85oAeVb6bHcS8irhTJX2FCOd8Ag==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.44.1.tgz", + "integrity": 
"sha512-8WVUPy3FtAsKSpyk21kV52HCxB+me6YkbkFHATzC2Yd3yuqHwy2lbFL4alJOLXKljoRw08Zk8/xEj89cLQ/4Nw==", "cpu": [ "arm" ], @@ -1251,9 +1278,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.37.0.tgz", - "integrity": "sha512-oNrJxcQT9IcbcmKlkF+Yz2tmOxZgG9D9GRq+1OE6XCQwCVwxixYAa38Z8qqPzQvzt1FCfmrHX03E0pWoXm1DqA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.44.1.tgz", + "integrity": "sha512-yuktAOaeOgorWDeFJggjuCkMGeITfqvPgkIXhDqsfKX8J3jGyxdDZgBV/2kj/2DyPaLiX6bPdjJDTu9RB8lUPQ==", "cpu": [ "arm64" ], @@ -1265,9 +1292,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.37.0.tgz", - "integrity": "sha512-pfxLBMls+28Ey2enpX3JvjEjaJMBX5XlPCZNGxj4kdJyHduPBXtxYeb8alo0a7bqOoWZW2uKynhHxF/MWoHaGQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.44.1.tgz", + "integrity": "sha512-W+GBM4ifET1Plw8pdVaecwUgxmiH23CfAUj32u8knq0JPFyK4weRy6H7ooxYFD19YxBulL0Ktsflg5XS7+7u9g==", "cpu": [ "arm64" ], @@ -1279,9 +1306,9 @@ ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.37.0.tgz", - "integrity": "sha512-yCE0NnutTC/7IGUq/PUHmoeZbIwq3KRh02e9SfFh7Vmc1Z7atuJRYWhRME5fKgT8aS20mwi1RyChA23qSyRGpA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.44.1.tgz", + "integrity": "sha512-1zqnUEMWp9WrGVuVak6jWTl4fEtrVKfZY7CvcBmUUpxAJ7WcSowPSAWIKa/0o5mBL/Ij50SIf9tuirGx63Ovew==", "cpu": [ "loong64" ], @@ -1293,9 +1320,9 @@ ] }, 
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.37.0.tgz", - "integrity": "sha512-NxcICptHk06E2Lh3a4Pu+2PEdZ6ahNHuK7o6Np9zcWkrBMuv21j10SQDJW3C9Yf/A/P7cutWoC/DptNLVsZ0VQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.44.1.tgz", + "integrity": "sha512-Rl3JKaRu0LHIx7ExBAAnf0JcOQetQffaw34T8vLlg9b1IhzcBgaIdnvEbbsZq9uZp3uAH+JkHd20Nwn0h9zPjA==", "cpu": [ "ppc64" ], @@ -1307,9 +1334,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.37.0.tgz", - "integrity": "sha512-PpWwHMPCVpFZLTfLq7EWJWvrmEuLdGn1GMYcm5MV7PaRgwCEYJAwiN94uBuZev0/J/hFIIJCsYw4nLmXA9J7Pw==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.44.1.tgz", + "integrity": "sha512-j5akelU3snyL6K3N/iX7otLBIl347fGwmd95U5gS/7z6T4ftK288jKq3A5lcFKcx7wwzb5rgNvAg3ZbV4BqUSw==", "cpu": [ "riscv64" ], @@ -1321,9 +1348,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.37.0.tgz", - "integrity": "sha512-DTNwl6a3CfhGTAOYZ4KtYbdS8b+275LSLqJVJIrPa5/JuIufWWZ/QFvkxp52gpmguN95eujrM68ZG+zVxa8zHA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.44.1.tgz", + "integrity": "sha512-ppn5llVGgrZw7yxbIm8TTvtj1EoPgYUAbfw0uDjIOzzoqlZlZrLJ/KuiE7uf5EpTpCTrNt1EdtzF0naMm0wGYg==", "cpu": [ "riscv64" ], @@ -1335,9 +1362,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.37.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.37.0.tgz", - "integrity": "sha512-hZDDU5fgWvDdHFuExN1gBOhCuzo/8TMpidfOR+1cPZJflcEzXdCy1LjnklQdW8/Et9sryOPJAKAQRw8Jq7Tg+A==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.44.1.tgz", + "integrity": "sha512-Hu6hEdix0oxtUma99jSP7xbvjkUM/ycke/AQQ4EC5g7jNRLLIwjcNwaUy95ZKBJJwg1ZowsclNnjYqzN4zwkAw==", "cpu": [ "s390x" ], @@ -1349,9 +1376,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.37.0.tgz", - "integrity": "sha512-pKivGpgJM5g8dwj0ywBwe/HeVAUSuVVJhUTa/URXjxvoyTT/AxsLTAbkHkDHG7qQxLoW2s3apEIl26uUe08LVQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.44.1.tgz", + "integrity": "sha512-EtnsrmZGomz9WxK1bR5079zee3+7a+AdFlghyd6VbAjgRJDbTANJ9dcPIPAi76uG05micpEL+gPGmAKYTschQw==", "cpu": [ "x64" ], @@ -1363,9 +1390,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.37.0.tgz", - "integrity": "sha512-E2lPrLKE8sQbY/2bEkVTGDEk4/49UYRVWgj90MY8yPjpnGBQ+Xi1Qnr7b7UIWw1NOggdFQFOLZ8+5CzCiz143w==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.44.1.tgz", + "integrity": "sha512-iAS4p+J1az6Usn0f8xhgL4PaU878KEtutP4hqw52I4IO6AGoyOkHCxcc4bqufv1tQLdDWFx8lR9YlwxKuv3/3g==", "cpu": [ "x64" ], @@ -1377,9 +1404,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.37.0.tgz", - "integrity": "sha512-Jm7biMazjNzTU4PrQtr7VS8ibeys9Pn29/1bm4ph7CP2kf21950LgN+BaE2mJ1QujnvOc6p54eWWiVvn05SOBg==", + "version": "4.44.1", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.44.1.tgz", + "integrity": "sha512-NtSJVKcXwcqozOl+FwI41OH3OApDyLk3kqTJgx8+gp6On9ZEt5mYhIsKNPGuaZr3p9T6NWPKGU/03Vw4CNU9qg==", "cpu": [ "arm64" ], @@ -1391,9 +1418,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.37.0.tgz", - "integrity": "sha512-e3/1SFm1OjefWICB2Ucstg2dxYDkDTZGDYgwufcbsxTHyqQps1UQf33dFEChBNmeSsTOyrjw2JJq0zbG5GF6RA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.44.1.tgz", + "integrity": "sha512-JYA3qvCOLXSsnTR3oiyGws1Dm0YTuxAAeaYGVlGpUsHqloPcFjPg+X0Fj2qODGLNwQOAcCiQmHub/V007kiH5A==", "cpu": [ "ia32" ], @@ -1405,9 +1432,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.37.0.tgz", - "integrity": "sha512-LWbXUBwn/bcLx2sSsqy7pK5o+Nr+VCoRoAohfJ5C/aBio9nfJmGQqHAhU6pwxV/RmyTk5AqdySma7uwWGlmeuA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.44.1.tgz", + "integrity": "sha512-J8o22LuF0kTe7m+8PvW9wk3/bRq5+mRo5Dqo6+vXb7otCm3TPhYOJqOaQtGU9YMWQSL3krMnoOxMr0+9E6F3Ug==", "cpu": [ "x64" ], @@ -1419,23 +1446,50 @@ ] }, "node_modules/@tsconfig/node20": { - "version": "20.1.5", - "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.5.tgz", - "integrity": "sha512-Vm8e3WxDTqMGPU4GATF9keQAIy1Drd7bPwlgzKJnZtoOsTm1tduUTbDjg0W5qERvGuxPI2h9RbMufH0YdfBylA==", + "version": "20.1.6", + "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.6.tgz", + "integrity": "sha512-sz+Hqx9zwZDpZIV871WSbUzSqNIsXzghZydypnfgzPKLltVJfkINfUeTct31n/tTSa9ZE1ZOfKdRre1uHHquYQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/chai": { + 
"version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", "dev": true, "license": "MIT" }, "node_modules/@types/estree": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", - "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, "node_modules/@types/github-script": { "name": "@actions/github-script", "version": "7.0.1", - "resolved": "git+ssh://git@github.com/actions/github-script.git#3908079ba1e7bce10117ad701c321d07e89017a9", + "resolved": "git+ssh://git@github.com/actions/github-script.git#f28e40c7f34bde8b3046d885e986cb6290c5673b", "dev": true, "license": "MIT", "dependencies": { @@ -1466,33 +1520,41 @@ "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", "license": "MIT" }, + "node_modules/@types/ms": { + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", + "version": "20.19.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.1.tgz", + "integrity": "sha512-jJD50LtlD2dodAEO653i3YF04NWak6jN3ky+Ri3Em3mGR39/glWiboM/IePaRbgwSfqM1TpGXfAg8ohn/4dTgA==", "dev": true, "license": "MIT", "dependencies": { - "undici-types": "~6.19.2" + "undici-types": "~6.21.0" } }, "node_modules/@vitest/coverage-v8": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.9.tgz", - "integrity": "sha512-15OACZcBtQ34keIEn19JYTVuMFTlFrClclwWjHo/IRPg/8ELpkgNTl0o7WLP9WO9XGH6+tip9CPYtEOrIDJvBA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.3.0", "@bcoe/v8-coverage": "^1.0.2", - "debug": "^4.4.0", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", "istanbul-lib-coverage": "^3.2.2", "istanbul-lib-report": "^3.0.1", "istanbul-lib-source-maps": "^5.0.6", "istanbul-reports": "^3.1.7", "magic-string": "^0.30.17", "magicast": "^0.3.5", - "std-env": "^3.8.0", + "std-env": "^3.9.0", "test-exclude": "^7.0.1", "tinyrainbow": "^2.0.0" }, @@ -1500,8 +1562,8 @@ "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "3.0.9", - "vitest": "3.0.9" + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" }, "peerDependenciesMeta": { "@vitest/browser": { @@ 
-1510,14 +1572,15 @@ } }, "node_modules/@vitest/expect": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.9.tgz", - "integrity": "sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.9", - "@vitest/utils": "3.0.9", + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" }, @@ -1526,13 +1589,13 @@ } }, "node_modules/@vitest/mocker": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.9.tgz", - "integrity": "sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.9", + "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, @@ -1541,7 +1604,7 @@ }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "peerDependenciesMeta": { "msw": { @@ -1553,9 +1616,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", - "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": 
"sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", "dev": true, "license": "MIT", "dependencies": { @@ -1566,27 +1629,28 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.9.tgz", - "integrity": "sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.9", - "pathe": "^2.0.3" + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.9.tgz", - "integrity": "sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.9", + "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" }, @@ -1595,27 +1659,27 @@ } }, "node_modules/@vitest/spy": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.9.tgz", - "integrity": "sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", "dev": 
true, "license": "MIT", "dependencies": { - "tinyspy": "^3.0.2" + "tinyspy": "^4.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.9.tgz", - "integrity": "sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.9", - "loupe": "^3.1.3", + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" }, "funding": { @@ -1623,9 +1687,9 @@ } }, "node_modules/acorn": { - "version": "8.14.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", - "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", "bin": { @@ -1707,6 +1771,18 @@ "node": ">=12" } }, + "node_modules/ast-v8-to-istanbul": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.3.tgz", + "integrity": "sha512-MuXMrSLVVoA6sYN/6Hke18vMzrT4TZNbZIj/hvh0fnYFpO+/kFXcLIaiPwXXWaQUPg4yJD8fj+lfJ7/1EBconw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "estree-walker": "^3.0.3", + "js-tokens": "^9.0.1" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -1729,9 +1805,9 @@ "license": "MIT" }, "node_modules/brace-expansion": { - 
"version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -1830,6 +1906,25 @@ "dev": true, "license": "MIT" }, + "node_modules/cross-env": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", + "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } + }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -1846,10 +1941,9 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", - "dev": true, + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -1902,16 +1996,16 @@ "license": "MIT" }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": 
"sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, "node_modules/esbuild": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", - "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz", + "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -1922,31 +2016,31 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.1", - "@esbuild/android-arm": "0.25.1", - "@esbuild/android-arm64": "0.25.1", - "@esbuild/android-x64": "0.25.1", - "@esbuild/darwin-arm64": "0.25.1", - "@esbuild/darwin-x64": "0.25.1", - "@esbuild/freebsd-arm64": "0.25.1", - "@esbuild/freebsd-x64": "0.25.1", - "@esbuild/linux-arm": "0.25.1", - "@esbuild/linux-arm64": "0.25.1", - "@esbuild/linux-ia32": "0.25.1", - "@esbuild/linux-loong64": "0.25.1", - "@esbuild/linux-mips64el": "0.25.1", - "@esbuild/linux-ppc64": "0.25.1", - "@esbuild/linux-riscv64": "0.25.1", - "@esbuild/linux-s390x": "0.25.1", - "@esbuild/linux-x64": "0.25.1", - "@esbuild/netbsd-arm64": "0.25.1", - "@esbuild/netbsd-x64": "0.25.1", - "@esbuild/openbsd-arm64": "0.25.1", - "@esbuild/openbsd-x64": "0.25.1", - "@esbuild/sunos-x64": "0.25.1", - "@esbuild/win32-arm64": "0.25.1", - "@esbuild/win32-ia32": "0.25.1", - "@esbuild/win32-x64": "0.25.1" + "@esbuild/aix-ppc64": "0.25.5", + "@esbuild/android-arm": "0.25.5", + "@esbuild/android-arm64": "0.25.5", + "@esbuild/android-x64": "0.25.5", + "@esbuild/darwin-arm64": 
"0.25.5", + "@esbuild/darwin-x64": "0.25.5", + "@esbuild/freebsd-arm64": "0.25.5", + "@esbuild/freebsd-x64": "0.25.5", + "@esbuild/linux-arm": "0.25.5", + "@esbuild/linux-arm64": "0.25.5", + "@esbuild/linux-ia32": "0.25.5", + "@esbuild/linux-loong64": "0.25.5", + "@esbuild/linux-mips64el": "0.25.5", + "@esbuild/linux-ppc64": "0.25.5", + "@esbuild/linux-riscv64": "0.25.5", + "@esbuild/linux-s390x": "0.25.5", + "@esbuild/linux-x64": "0.25.5", + "@esbuild/netbsd-arm64": "0.25.5", + "@esbuild/netbsd-x64": "0.25.5", + "@esbuild/openbsd-arm64": "0.25.5", + "@esbuild/openbsd-x64": "0.25.5", + "@esbuild/sunos-x64": "0.25.5", + "@esbuild/win32-arm64": "0.25.5", + "@esbuild/win32-ia32": "0.25.5", + "@esbuild/win32-x64": "0.25.5" } }, "node_modules/escape-string-regexp": { @@ -1963,20 +2057,19 @@ } }, "node_modules/eslint": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.23.0.tgz", - "integrity": "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==", + "version": "9.30.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.30.1.tgz", + "integrity": "sha512-zmxXPNMOXmwm9E0yQLi5uqXHs7uq2UIiqEKo3Gq+3fwo1XrJ+hijAZImyF7hclW3E6oHz43Yk3RP8at6OTKflQ==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.2", - "@eslint/config-helpers": "^0.2.0", - "@eslint/core": "^0.12.0", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.0", + "@eslint/core": "^0.14.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.23.0", - "@eslint/plugin-kit": "^0.2.7", + "@eslint/js": "9.30.1", + "@eslint/plugin-kit": "^0.3.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -1987,9 +2080,9 @@ "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.3.0", - "eslint-visitor-keys": 
"^4.2.0", - "espree": "^10.3.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -2024,9 +2117,9 @@ } }, "node_modules/eslint-scope": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", - "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -2041,9 +2134,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2054,15 +2147,15 @@ } }, "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "eslint-visitor-keys": "^4.2.1" }, "engines": 
{ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2128,9 +2221,9 @@ } }, "node_modules/expect-type": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.0.tgz", - "integrity": "sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", + "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2158,6 +2251,21 @@ "dev": true, "license": "MIT" }, + "node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, "node_modules/file-entry-cache": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", @@ -2276,9 +2384,9 @@ } }, "node_modules/glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2302,11 +2410,10 @@ } }, "node_modules/globals": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-16.0.0.tgz", - "integrity": 
"sha512-iInW14XItCXET01CQFqudPOWP2jYMl7T+QRQT+UNcR/iQncN/F0UNpgd76iFkBPgNQb4+X3LV9tLJYzwh+Gl3A==", + "version": "16.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.3.0.tgz", + "integrity": "sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=18" }, @@ -2478,6 +2585,13 @@ "@pkgjs/parseargs": "^0.11.0" } }, + "node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -2559,9 +2673,9 @@ "license": "MIT" }, "node_modules/loupe": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", - "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.4.tgz", + "integrity": "sha512-wJzkKwJrheKtknCOKNEtDK4iqg/MxmZheEMtSTYvnzRdEYaZzmgH976nenp8WdJRdx5Vc1X/9MO0Oszl6ezeXg==", "dev": true, "license": "MIT" }, @@ -2611,15 +2725,15 @@ } }, "node_modules/marked": { - "version": "15.0.7", - "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.7.tgz", - "integrity": "sha512-dgLIeKGLx5FwziAnsk4ONoGwHwGPJzselimvlVskE9XLN4Orv9u2VA3GWw/lYUqjfA0rUT/6fqKwfZJapP9BEg==", + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.0.0.tgz", + "integrity": "sha512-MUKMXDjsD/eptB7GPzxo4xcnLS6oo7/RHimUMHEDRhUooPwmN9BEpMl7AEOJv3bmso169wHI2wUF9VQgL7zfmA==", "license": "MIT", "bin": { "marked": "bin/marked.js" }, "engines": { - "node": ">= 18" + "node": ">= 20" } }, "node_modules/minimatch": { @@ -2649,7 +2763,6 @@ "version": "2.1.3", "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, "license": "MIT" }, "node_modules/nanoid": { @@ -2803,9 +2916,9 @@ "license": "MIT" }, "node_modules/pathval": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", - "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", "dev": true, "license": "MIT", "engines": { @@ -2819,10 +2932,23 @@ "dev": true, "license": "ISC" }, + "node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/postcss": { - "version": "8.5.3", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -2840,7 +2966,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -2895,13 +3021,13 @@ } }, "node_modules/rollup": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.37.0.tgz", - 
"integrity": "sha512-iAtQy/L4QFU+rTJ1YUjXqJOJzuwEghqWzCEYD2FEghT7Gsy1VdABntrO4CLopA5IkflTyqNiLNwPcOJ3S7UKLg==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.44.1.tgz", + "integrity": "sha512-x8H8aPvD+xbl0Do8oez5f5o8eMS3trfCghc4HhLAnCkj7Vl0d1JWGs0UF/D886zLW2rOj2QymV/JcSSsw+XDNg==", "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -2911,40 +3037,33 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.37.0", - "@rollup/rollup-android-arm64": "4.37.0", - "@rollup/rollup-darwin-arm64": "4.37.0", - "@rollup/rollup-darwin-x64": "4.37.0", - "@rollup/rollup-freebsd-arm64": "4.37.0", - "@rollup/rollup-freebsd-x64": "4.37.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.37.0", - "@rollup/rollup-linux-arm-musleabihf": "4.37.0", - "@rollup/rollup-linux-arm64-gnu": "4.37.0", - "@rollup/rollup-linux-arm64-musl": "4.37.0", - "@rollup/rollup-linux-loongarch64-gnu": "4.37.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.37.0", - "@rollup/rollup-linux-riscv64-gnu": "4.37.0", - "@rollup/rollup-linux-riscv64-musl": "4.37.0", - "@rollup/rollup-linux-s390x-gnu": "4.37.0", - "@rollup/rollup-linux-x64-gnu": "4.37.0", - "@rollup/rollup-linux-x64-musl": "4.37.0", - "@rollup/rollup-win32-arm64-msvc": "4.37.0", - "@rollup/rollup-win32-ia32-msvc": "4.37.0", - "@rollup/rollup-win32-x64-msvc": "4.37.0", + "@rollup/rollup-android-arm-eabi": "4.44.1", + "@rollup/rollup-android-arm64": "4.44.1", + "@rollup/rollup-darwin-arm64": "4.44.1", + "@rollup/rollup-darwin-x64": "4.44.1", + "@rollup/rollup-freebsd-arm64": "4.44.1", + "@rollup/rollup-freebsd-x64": "4.44.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.44.1", + "@rollup/rollup-linux-arm-musleabihf": "4.44.1", + "@rollup/rollup-linux-arm64-gnu": "4.44.1", + "@rollup/rollup-linux-arm64-musl": "4.44.1", + "@rollup/rollup-linux-loongarch64-gnu": "4.44.1", + 
"@rollup/rollup-linux-powerpc64le-gnu": "4.44.1", + "@rollup/rollup-linux-riscv64-gnu": "4.44.1", + "@rollup/rollup-linux-riscv64-musl": "4.44.1", + "@rollup/rollup-linux-s390x-gnu": "4.44.1", + "@rollup/rollup-linux-x64-gnu": "4.44.1", + "@rollup/rollup-linux-x64-musl": "4.44.1", + "@rollup/rollup-win32-arm64-msvc": "4.44.1", + "@rollup/rollup-win32-ia32-msvc": "4.44.1", + "@rollup/rollup-win32-x64-msvc": "4.44.1", "fsevents": "~2.3.2" } }, - "node_modules/rollup/node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "dev": true, - "license": "MIT" - }, "node_modules/semver": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", - "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, "license": "ISC", "bin": { @@ -2997,6 +3116,21 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/simple-git": { + "version": "3.28.0", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.28.0.tgz", + "integrity": "sha512-Rs/vQRwsn1ILH1oBUy8NucJlXmnnLeLCfcvbSehkPzbv3wwoFWIdtfd6Ndo6ZPhlPsCZ60CPI4rxurnwAa+a2w==", + "license": "MIT", + "dependencies": { + "@kwsites/file-exists": "^1.1.1", + "@kwsites/promise-deferred": "^1.1.1", + "debug": "^4.4.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/steveukx/git-js?sponsor=1" + } + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -3015,9 +3149,9 @@ "license": "MIT" }, 
"node_modules/std-env": { - "version": "3.8.1", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.1.tgz", - "integrity": "sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==", + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", "dev": true, "license": "MIT" }, @@ -3138,6 +3272,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-literal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -3167,9 +3314,9 @@ } }, "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3206,10 +3353,27 @@ "dev": true, "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": 
"sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, "node_modules/tinypool": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", - "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", "dev": true, "license": "MIT", "engines": { @@ -3227,9 +3391,9 @@ } }, "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", "dev": true, "license": "MIT", "engines": { @@ -3260,9 +3424,9 @@ } }, "node_modules/typescript": { - "version": "5.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", - "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, "license": "Apache-2.0", "bin": { @@ -3287,9 +3451,9 @@ } }, "node_modules/undici-types": { - "version": "6.19.8", - "resolved": 
"https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "dev": true, "license": "MIT" }, @@ -3311,21 +3475,24 @@ } }, "node_modules/vite": { - "version": "6.2.4", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.4.tgz", - "integrity": "sha512-veHMSew8CcRzhL5o8ONjy8gkfmFJAd5Ac16oxBUjlwgX3Gq2Wqr+qNC3TjPIpy7TPV/KporLga5GT9HqdrCizw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.0.0.tgz", + "integrity": "sha512-ixXJB1YRgDIw2OszKQS9WxGHKwLdCsbQNkpJN171udl6szi/rIySHL6/Os3s2+oE4P/FLD4dxg4mD7Wust+u5g==", "dev": true, "license": "MIT", "dependencies": { "esbuild": "^0.25.0", - "postcss": "^8.5.3", - "rollup": "^4.30.1" + "fdir": "^6.4.6", + "picomatch": "^4.0.2", + "postcss": "^8.5.6", + "rollup": "^4.40.0", + "tinyglobby": "^0.2.14" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -3334,14 +3501,14 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", - "less": "*", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" @@ -3383,17 +3550,17 @@ } }, "node_modules/vite-node": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.9.tgz", - "integrity": 
"sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", "dev": true, "license": "MIT", "dependencies": { "cac": "^6.7.14", - "debug": "^4.4.0", - "es-module-lexer": "^1.6.0", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" @@ -3406,31 +3573,34 @@ } }, "node_modules/vitest": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", - "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.9", - "@vitest/mocker": "3.0.9", - "@vitest/pretty-format": "^3.0.9", - "@vitest/runner": "3.0.9", - "@vitest/snapshot": "3.0.9", - "@vitest/spy": "3.0.9", - "@vitest/utils": "3.0.9", + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", "chai": "^5.2.0", - "debug": "^4.4.0", - "expect-type": "^1.1.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", - "std-env": "^3.8.0", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", - "tinypool": "^1.0.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0", - "vite-node": 
"3.0.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "bin": { @@ -3446,8 +3616,8 @@ "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.9", - "@vitest/ui": "3.0.9", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, @@ -3620,21 +3790,6 @@ "dev": true, "license": "ISC" }, - "node_modules/yaml": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", - "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/.github/package.json b/.github/package.json index 503cf1493665..f11f98efc071 100644 --- a/.github/package.json +++ b/.github/package.json @@ -1,34 +1,41 @@ { + "private": "true", "type": "module", "_comments": { - "dependencies": "Runtime dependencies must be kept to an absolute minimum for performance, ideally with no transitive dependencies" + "dependencies": "Runtime dependencies must be kept to an absolute minimum for performance, ideally with no transitive dependencies", + "dependencies2": "All runtime and dev dependencies in this file, must be a superset of shared/package.json" }, "dependencies": { - "@apidevtools/json-schema-ref-parser": "^11.9.3", + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "debug": "^4.4.0", "js-yaml": "^4.1.0", - "marked": "^15.0.7" + "marked": "^16.0.0", + "simple-git": "^3.27.0" }, "devDependencies": { "@eslint/js": "^9.22.0", "@octokit/webhooks-types": "^7.5.1", "@tsconfig/node20": "^20.1.4", + "@types/debug": "^4.1.12", "@types/github-script": "github:actions/github-script", 
"@types/js-yaml": "^4.0.9", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "cross-env": "^7.0.3", "eslint": "^9.22.0", "globals": "^16.0.0", "prettier": "~3.5.3", + "semver": "^7.7.1", "typescript": "~5.8.2", "vitest": "^3.0.7" }, "scripts": { "lint": "npm run lint:eslint && npm run lint:tsc", - "lint:eslint": "eslint", - "lint:tsc": "tsc && echo 'Type checking completed successfully'", - "prettier": "prettier \"**/*.js\" --check", - "prettier:debug": "prettier \"**/*.js\" --check ---log-level debug", - "prettier:write": "prettier \"**/*.js\" --write", + "lint:eslint": "cross-env DEBUG=eslint:eslint eslint", + "lint:tsc": "tsc --build --verbose", + "format": "prettier . --write", + "format:check": "prettier . --check", + "format:check:ci": "prettier . --check --log-level debug", "test": "vitest", "test:ci": "vitest run --coverage --reporter=verbose" } diff --git a/.github/policies/resourceManagement.yml b/.github/policies/resourceManagement.yml index ab740a6edda8..c83f90dcd345 100644 --- a/.github/policies/resourceManagement.yml +++ b/.github/policies/resourceManagement.yml @@ -670,7 +670,8 @@ configuration: then: - mentionUsers: mentionees: - - yangyuan + - rkuthala + - MattGal replyTemplate: Thanks for the feedback! We are routing this to the appropriate team for follow-up. cc ${mentionees}. assignMentionees: False - if: @@ -937,8 +938,7 @@ configuration: then: - mentionUsers: mentionees: - - Jingshu923 - - zhangyd2015 + - ruowan - Frey-Wang replyTemplate: Thanks for the feedback! We are routing this to the appropriate team for follow-up. cc ${mentionees}. 
assignMentionees: False diff --git a/.github/prompts/check-api-readiness.prompt.md b/.github/prompts/check-api-readiness.prompt.md new file mode 100644 index 000000000000..9db18c3967f0 --- /dev/null +++ b/.github/prompts/check-api-readiness.prompt.md @@ -0,0 +1,7 @@ +--- +mode: 'agent' +tools: ['CheckApiReadyForSDKGeneration', 'codebase', 'GetPullRequest', 'GetGitHubUserDetails', 'GetPullRequestForCurrentBranch'] +description: 'Check API Readiness for SDK Generation' +--- +Your goal is to check if API spec pull request is ready for SDK generation. Identify the next action required from user based on the comments on spec pull request if spec is not ready and notify the user. +Before running, get spec pull request link for current branch or from user if not available in current context. If pull request has APIView links, then highlight them to user. \ No newline at end of file diff --git a/.github/prompts/check-package-readiness.prompt.md b/.github/prompts/check-package-readiness.prompt.md new file mode 100644 index 000000000000..cd30574baa12 --- /dev/null +++ b/.github/prompts/check-package-readiness.prompt.md @@ -0,0 +1,36 @@ +--- +mode: 'agent' +tools: ['CheckPackageReleaseReadiness'] +description: 'This prompt is designed to check the release readiness of a SDK package.' +--- +## Goal +Check the release readiness of an SDK package by collecting the required information from the user and executing the readiness check. + +## Instructions +1. **Collect Required Information**: + - Prompt the user for the exact package name + - Prompt the user to select the programming language from the following options (case sensitive): + - Python + - Java + - JavaScript + - .NET + - Go + +2. **Execute Readiness Check**: + - Use the `CheckPackageReleaseReadiness` tool with the provided package name and selected language + - Do not check for existing pull requests to run this step. + - Do not ask the user to create a release plan to run this step. + +3. 
**Present Results**: + - If the package is ready for release, highlight and provide the link to the release pipeline + - If the package is not ready, display the specific issues that need to be resolved + +4. **Follow-up Actions**: + - Provide clear next steps based on the readiness status + - If issues are found, offer guidance on how to resolve them + +## Expected User Interaction Flow +1. Ask: "What is the exact name of the package you want to check for release readiness?" +2. Ask: "Please select the programming language for this package: Python, Java, JavaScript, .NET, or Go" +3. Execute the readiness check using the provided information +4. Display results and next steps diff --git a/.github/prompts/create-release-plan.prompt.md b/.github/prompts/create-release-plan.prompt.md new file mode 100644 index 000000000000..0cee3bedde3b --- /dev/null +++ b/.github/prompts/create-release-plan.prompt.md @@ -0,0 +1,61 @@ +--- +mode: 'agent' +tools: ['CreateReleasePlan', 'GetReleasePlanForPullRequest', 'GetReleasePlan', 'LinkSdkPullRequestToReleasePlan'] +--- + +# Release Plan Creation Process +Your goal is to create a valid release plan. You must prompt user to provide all required information and all input must match the format and requirement mentioned in step 3 below. 
+Follow these steps in order to create or manage a release plan for an API specification pull request: + +## Step 1: Validate Prerequisites +- Check if an API spec pull request is available in the current context +- If no pull request is available, prompt the user to provide the API spec pull request link +- Validate that the provided pull request link is accessible and valid + +## Step 2: Check Existing Release Plan +- Use `GetReleasePlanForPullRequest` to check if a release plan already exists for the API spec pull request +- If a release plan exists: + - Display the existing release plan details to the user + - Skip to Step 5 (Link SDK Pull Requests) +- If no release plan exists, proceed to Step 3 + +## Step 3: Gather Release Plan Information +Collect the following required information from the user. Do not use non GUID valid for product and service tree Id. Do not create release plan with temporary values. +Do not assume or use default for service tree Id and product service tree Id. Always show the values to user and ask them to confirm it's a valid value in service tree. +If any details are missing, prompt the user accordingly: + +- **API Lifecycle Stage**: Must be one of: + - Private Preview + - Public Preview + - GA (Generally Available) +- **Service Tree ID**: GUID format identifier for the service in Service Tree. Before creating release plan, always show the value to user and ask them to confirm it's a valid value in service tree. +- **Product Service Tree ID**: GUID format identifier for the product in Service Tree. Before creating release plan, always show the value to user and ask them to confirm it's a valid value in service tree. +- **Expected Release Timeline**: Format must be in "Month YYYY" +- **API Version**: The version of the API being released +- **SDK Release Type**: Value must be beta or stable. 
+ - "beta" for preview API versions + - "stable" for GA API versions + +## Step 4: Create Release Plan +- If the user doesn't know the required details, direct them to create a release plan using the release planner +- Provide this resource: [Release Plan Creation Guide](https://eng.ms/docs/products/azure-developer-experience/plan/release-plan-create) +- Once all information is gathered, use `CreateReleasePlan` to create the release plan +- Display the newly created release plan details to the user for confirmation +- Run `/sdk-details-in-release-plan` to identify languages configured in the TypeSpec project and add them to the release plan + +## Step 5: Update SDK Details in Release Plan +- Run `/sdk-details-in-release-plan.prompt.md` to add languages and package names to the release plan +- If the TypeSpec project is for a management plane, run `/verify-namespace-approval` if this is first release of SDK. + +## Step 6: Link SDK Pull Requests (if applicable) +- Ask the user if they have already created SDK pull requests locally for any programming language +- If SDK pull requests exist: + - Collect the pull request links from the user + - Use `LinkSdkPullRequestToReleasePlan` to link each SDK pull request to the release plan + - Confirm successful linking for each SDK pull request + +## Step 7: Summary +- Display a summary of the completed actions: + - Release plan status (created or existing) + - Linked SDK pull requests (if any) + - Next steps or recommendations for the user \ No newline at end of file diff --git a/.github/prompts/create-sdk-locally.prompt.md b/.github/prompts/create-sdk-locally.prompt.md new file mode 100644 index 000000000000..34a1a76c5f08 --- /dev/null +++ b/.github/prompts/create-sdk-locally.prompt.md @@ -0,0 +1,36 @@ +--- +mode: 'agent' +tools: ['codebase'] +--- +Your goal is to help guide the user to create SDK locally for TypeSpec changes. This is currently supported for **Python** only. 
User can generate SDK for other languages using SDK generation pipeline. +## Steps to create Python SDK locally from TypeSpec +### Step 1: Check for existing azure-sdk-for-python repository +- Prompt the user to provide the path to their cloned azure-sdk-for-python repository. +### Step 2: Validate repository path +- If the user provides a path to the azure-sdk-for-python repository: + - Check if the repository exists at the specified path. + - If the repository exists, proceed to Step 5. +### Step 3: Guide user to set up azure-sdk-for-python repository (if not found) +- If the user does not have the repository or the path is invalid: + - Go to parent directory of current repo root path. + - Provide instructions to fork https://github.com/Azure/azure-sdk-for-python repository to the user's GitHub account. + - Provide instructions to clone the forked repository to the local machine: + ```bash + git clone https://github.com//azure-sdk-for-python.git + ``` +### Step 4: Set repository path +- Consider the cloned path as the path to the azure-sdk-for-python repository. +### Step 5: Open azure-sdk-for-python repository in VSCode +- Do not ask the user to run tsp compile. +- Prompt user to open the azure-sdk-for-python repository in VSCode. +### Step 6: Provide SDK generation instructions +- Inform user to use the following prompt to start SDK generation using GitHub Copilot agent: + ``` + "Help me generate SDK for Python from TypeSpec API specification for project ." + ``` +### Step 7: Inform user about SDK generation +- Inform user to provide link to SDK pull request if they generate SDK locally and created a pull request for it. SDK generation +step below will skip it for the language and reuse the pull request link provided by the user. +- In some cases, user will come back and make more changes to TypeSpec so start the process from step 1 again. 
+- If user provides a link to SDK pull request then link SDK pull request to release plan if a release plan already exists and skip SDK generation for that language. +- If a release plan does not exist then link the SDK pull request when release plan is created. diff --git a/.github/prompts/create-spec-pullrequest.prompt.md b/.github/prompts/create-spec-pullrequest.prompt.md new file mode 100644 index 000000000000..b3284f0db417 --- /dev/null +++ b/.github/prompts/create-spec-pullrequest.prompt.md @@ -0,0 +1,7 @@ +--- +mode: 'agent' +tools: ['codebase', 'CreatePullRequest', 'GetModifiedTypeSpecProjects', 'GetGitHubUserDetails', 'CheckIfSpecInPublicRepo', 'GetPullRequest', 'GetPullRequestForCurrentBranch'] +--- +Your goal is to identify modified TypeSpec project in current branch and create a pull request for it. +Check if a pull request already exists using GetPullRequestForCurrentBranch. If a pull request exists, inform the user and show the pull request details. If no pull request exists, create a new pull request using CreatePullRequest. + diff --git a/.github/prompts/run-sdk-gen-pipeline.prompt.md b/.github/prompts/run-sdk-gen-pipeline.prompt.md new file mode 100644 index 000000000000..57d0aef7c278 --- /dev/null +++ b/.github/prompts/run-sdk-gen-pipeline.prompt.md @@ -0,0 +1,45 @@ +--- +mode: 'agent' +tools: ['GenerateSDK', 'GetSDKPullRequestDetails', 'GetReleasePlan', 'GetReleasePlanForPullRequest', 'GetPipelineRun', 'GetPipelineRunStatus', 'LinkSdkPullRequestToReleasePlan'] +description: 'Generate SDKs from TypeSpec using pipeline' +--- +Your goal is to generate SDKs from the TypeSpec spec pull request. Get API spec pull request link for current branch or from user if not available in current context. +Provide links to SDK pull request when generated for each language. 
+ +## Steps for SDK Generation + +### Step 1: Check for Existing SDK Pull Requests +- Check if SDK pull requests exist from local SDK generation for any languages +- If SDK pull request exists for a language, skip SDK generation for that language +- Link existing SDK pull request to release plan + +### Step 2: Retrieve and Validate Release Plan +- Retrieve the release plan for the API spec +- If API Lifecycle Stage is `Private Preview` then inform user that SDK generation is not supported for this stage and complete the workflow. +- Check if SDK generation has already occurred for each language +- Verify if SDK pull requests exist for each language: + - If an SDK pull request exists, display its details + - If no pull request exists or regeneration is needed, proceed to next step + +### Step 3: Execute SDK Generation Pipeline +- Run SDK generation for each required language: Python, .NET, JavaScript, Java, and Go +- Execute the SDK generation pipeline with the following required parameters: + - TypeSpec project root path + - Pull request number (if the API spec is not merged to the main branch) + - API version + - SDK release type (beta for preview API versions, stable otherwise) + - Language options: `Python`, `.NET`, `JavaScript`, `Java`, `Go` + - Release plan work item ID + +### Step 4: Monitor Pipeline Status +- Check the status of SDK generation pipeline every 2 minutes +- Continue monitoring until pipeline succeeds or fails +- Get SDK pull request link from pipeline once available + +### Step 5: Display Results +- Show all pipeline details once pipeline is in completed status +- Highlight the language name for each SDK generation task when displaying details +- Once SDK pull request URL is available: + - Inform the user of successful SDK generation + - Display the pull request details for each language + - Provide links to each generated SDK pull request \ No newline at end of file diff --git a/.github/prompts/sdk-details-in-release-plan.prompt.md 
b/.github/prompts/sdk-details-in-release-plan.prompt.md new file mode 100644 index 000000000000..862b1470fb99 --- /dev/null +++ b/.github/prompts/sdk-details-in-release-plan.prompt.md @@ -0,0 +1,41 @@ +--- +mode: 'agent' +description: 'Identify languages configured in the TypeSpec project and add it to release plan' +tools: ['GetReleasePlanForPullRequest', 'GetReleasePlan', 'UpdateReleasePlanSDKInfo'] +--- +# Step 1: Find the list of languages and package names +**Goal**: Identify languages configured in the TypeSpec project and generate the json object with language and package name. +1. Identify the language emitter configuration in the `tspconfig.yaml` file in the TypeSpec project root. +2. Identify the package name or namespace for each language emitter. +3. Map the language name in emitter to one of the following in Pascal case(except .NET): + - .NET + - Java + - Python + - JavaScript + - Go +4. Remove `github.com/Azure/azure-sdk-for-go/` from Go package name. +4. Create a JSON array object with the following structure: + ```json + [ + { + "language": "", + "packageName": "" + }, + ... + ] + ``` +5. If no languages are configured, inform the user: "No languages configured in TypeSpec project. Please add at least one language emitter in tspconfig.yaml." +**Success Criteria**: JSON object with languages and package names created. + +# Step 2: Check if release plan exists +**Goal**: Determine if a release plan exists for the API spec pull request or work item Id or release plan Id in current context. +1. Get release plan +2. If no release plan exists, inform the user: "No release plan exists for the API spec pull request. Please create a release plan first." +3. If a release plan exists, proceed to Step 3. +**Success Criteria**: Release plan exists or user informed to create one. + +# Step 3: Update Release Plan with SDK Information +**Goal**: Update the release plan with the languages and package names identified in Step 1. +1. 
Use `UpdateReleasePlanSDKInfo` to update the release plan work item with the JSON object created in Step 1. +2. Confirm successful update of the release plan with the SDK information and summary of languages and package names. +**Success Criteria**: Release plan updated with languages and package names. \ No newline at end of file diff --git a/.github/prompts/typespec-to-sdk.prompt.md b/.github/prompts/typespec-to-sdk.prompt.md new file mode 100644 index 000000000000..304910be1399 --- /dev/null +++ b/.github/prompts/typespec-to-sdk.prompt.md @@ -0,0 +1,116 @@ +--- +mode: 'agent' +description: 'Generate SDKs from TypeSpec' +--- +Your goal is to guide user through the process of generating SDKs from TypeSpec projects. Show all the high level steps to the user to ensure they understand the flow. Use the provided tools to perform actions and gather information as needed. + +## Pre-Flight Check +- Verify ${workspaceFolder} is not on main branch +- If on main branch, prompt user: "You are currently on the main branch. Please create a new branch using `git checkout -b ` before proceeding." +- Wait for user confirmation before continuing + +## Step 1: Identify TypeSpec Project +**Goal**: Locate the TypeSpec project root path +**Actions**: +1. Check if `tspconfig.yaml` or `main.tsp` files are open in editor +2. If found, use the parent directory as project root +3. If not found, prompt user: "Please provide the path to your TypeSpec project root directory" +4. Validate the provided path contains required TypeSpec files +**Success Criteria**: Valid TypeSpec project path identified + +## Step 2: Validate TypeSpec Specification +**Goal**: Ensure TypeSpec specification compiles without errors +**Actions**: +1. Run `/validate-typespec` command +2. If validation succeeds, proceed to Step 3 +3. 
If validation fails: + - Display all compilation errors to user + - Prompt: "Please fix the TypeSpec compilation errors before proceeding" + - Wait for user to fix errors and re-run validation +**Success Criteria**: TypeSpec compilation passes without errors + +## Step 3: Verify Authentication and Repository Status +**Goal**: Ensure user is authenticated and working in correct repository +**Actions**: +1. Run `GetGitHubUserDetails` to verify login status +2. If not logged in, prompt: "Please login to GitHub using `gh auth login`" +3. Once logged in, display user details to confirm identity +4. Run `CheckIfSpecInPublicRepo` to verify repository +5. If not in public repo, inform: "Please make spec changes in Azure/azure-rest-api-specs public repo to generate SDKs" +**Success Criteria**: User authenticated and working in public Azure repo + +## Step 4: Review and Commit Changes +**Goal**: Stage and commit TypeSpec modifications +**Actions**: +1. Run `GetModifiedTypeSpecProjects` to identify changes +2. If no changes found, inform: "No TypeSpec projects were modified in current branch" +3. Display all modified files (excluding `.github` and `.vscode` folders) +4. Prompt user: "Please review the modified files. Do you want to commit these changes? (yes/no)" +5. If yes: + - Verify current branch is not "main" + - Run `git add ` + - Prompt for commit message + - Run `git commit -m ""` + - Run `git push -u origin ` +**Success Criteria**: Changes committed and pushed to remote branch + +## Step 5: Choose SDK Generation Method +**Goal**: Determine how to generate SDKs +**Actions**: +1. Present options: "How would you like to generate SDKs?" + - Option A: "Generate SDK locally". This is currently supported only for Python. Do not recommend this for other languages. + - Option B: "Use SDK generation pipeline" +2. 
Based on selection: + - If Option A: Run `/create-sdk-locally` and then proceed to Step 6 + - If Option B: Continue to Step 6 +**Success Criteria**: SDK generation method selected + +## Step 6: Create Specification Pull Request +**Goal**: Create PR for TypeSpec changes if not already created +**Actions**: +1. Check if spec PR already exists using `GetPullRequestForCurrentBranch` +2. If PR exists, display PR details and proceed to Step 7 +3. If no PR exists: + - Run `/create-spec-pullrequest` + - Wait for PR creation confirmation + - Display created PR details +**Success Criteria**: Specification pull request exists + +## Step 7: Generate SDKs via Pipeline +**Goal**: Create release plan and generate SDKs +**Actions**: +1. Run `/create-release-plan` +2. If SDK PRs exist, link them to the release plan +3. Run `/sdk-details-in-release-plan` to add languages and package names to the release plan +4. If TypeSpec project is for management plane, Run `/verify-namespace-approval` to check package namespace approval. +This step should not check package readiness to verify namespace approval for management plane SDK. +5. Run `/run-sdk-gen-pipeline` with the spec PR +6. Monitor pipeline status and provide updates +7. Display generated SDK PR links when available +**Success Criteria**: SDK generation pipeline initiated and SDKs generated + +## Step 8: Show Generated SDK PRs +**Goal**: Display all created SDK pull requests +**Actions**: +1. Run `GetSDKPullRequestDetails` to fetch generated SDK PR info. + +## Step 9: Create release plan +**Goal**: Create a release plan for the generated SDKs +**Actions**: +1. Run `/create-release-plan` to create a release plan using the spec pull request. +2. If the release plan already exists, display the existing plan details. + +## Step 10: Mark Spec PR as Ready for Review +**Goal**: Update spec PR to ready for review status +**Actions**: +1. 
Prompt user to change spec PR to ready for review: "Please change the spec pull request to ready for review status" +2. Get approval and merge the spec PR + +## Step 11: Release SDK Package +**Goal**: Release the SDK package using the release plan +**Actions**: +1. Run `ReleaseSdkPackage` to release the SDK package. +2. Inform user to approve the package release using release pipeline. + +## Process Complete +Display summary of all created PRs and next steps for user. \ No newline at end of file diff --git a/.github/prompts/validate-typespec.prompt.md b/.github/prompts/validate-typespec.prompt.md new file mode 100644 index 000000000000..debdc6eaa31e --- /dev/null +++ b/.github/prompts/validate-typespec.prompt.md @@ -0,0 +1,8 @@ +--- +mode: 'agent' +tools: ['RunTypeSpecValidation'] +description: 'Validate TypeSpec' +--- +Your goal is to identify the TypeSpec project root if not available in current context and validate TypeSpec project. +Before running, inform user that TypeSpec validation takes around 20 - 30 seconds. Provide complete summary after +running the tool and highlight any errors and help user fix them. \ No newline at end of file diff --git a/.github/prompts/verify-namespace-approval.prompt.md b/.github/prompts/verify-namespace-approval.prompt.md new file mode 100644 index 000000000000..fa78f6ef6c76 --- /dev/null +++ b/.github/prompts/verify-namespace-approval.prompt.md @@ -0,0 +1,24 @@ +--- +mode: 'agent' +description: 'Verify SDK namespace approval for management plane' +tools: ['GetReleasePlan', 'GetReleasePlanForPullRequest', 'LinkNameSpaceApprovalIssue'] +--- +This task is required only for management plane API spec and only if a release plan exists for the API spec pull request. + +## Step 1: Check if release plan exists and it is for management plane SDK +**Goal**: Determine if a release plan exists for the API spec pull request or work item Id or release plan Id in current context. +**Actions**: +1. 
Get release plan and check if it is for management plane SDK +2. If not, inform user: "This task is only applicable for management plane SDKs. No action required." +3. Check if release plan already has namespace approval issue. Also prompt user to check if this is the first release of SDK. +4. If namespace approval issue exists, inform user: "Namespace approval issue already exists for this release plan." Prompt user to +check if they want to link a different namespace approval issue to the release plan. Show namespace approval status. +5. Move to Step 2 if namespace approval issue does not exist or user wants to link a different namespace approval issue. + +## Step 2: Gather Namespace Approval Information +**Goal**: Link namespace approval issue to the release plan. +**Actions**: +1. Collect GitHub issue created in Azure/azure-sdk repo for namespace approval. Do not use any other repo name. +2. Run `LinkNameSpaceApprovalIssue` to link the issue to the release plan work item id. +3. Confirm successful linking of the namespace approval issue to the release plan. +**Success Criteria**: Namespace approval issue linked to the release plan or confirmed as already linked. 
diff --git a/.github/shared/.prettierignore b/.github/shared/.prettierignore new file mode 100644 index 000000000000..4ebc8aea50e0 --- /dev/null +++ b/.github/shared/.prettierignore @@ -0,0 +1 @@ +coverage diff --git a/.github/shared/cmd/spec-model.js b/.github/shared/cmd/spec-model.js new file mode 100755 index 000000000000..85c28718cf5e --- /dev/null +++ b/.github/shared/cmd/spec-model.js @@ -0,0 +1,39 @@ +#!/usr/bin/env node + +import { ConsoleLogger } from "../src/logger.js"; +import { SpecModel } from "../src/spec-model.js"; + +const USAGE = + "Usage: npx spec-model path/to/spec [--debug] [--include-refs] [--relative-paths]\n" + + "Example: npx spec-model specification/contosowidgetmanager"; + +// Exclude first two args (node, script file) +let args = process.argv.slice(2); + +const debug = args.includes("--debug"); +args = args.filter((a) => a != "--debug"); + +const includeRefs = args.includes("--include-refs"); +args = args.filter((a) => a != "--include-refs"); + +const relativePaths = args.includes("--relative-paths"); +args = args.filter((a) => a != "--relative-paths"); + +if (args.length < 1) { + console.error(USAGE); + process.exit(1); +} + +if (args.length > 1) { + console.error("ERROR: Too many arguments\n"); + console.error(USAGE); + process.exit(1); +} + +const specPath = args[0]; + +const specModel = new SpecModel(specPath, { + logger: new ConsoleLogger(debug), +}); + +console.log(JSON.stringify(await specModel.toJSONAsync({ includeRefs, relativePaths }), null, 2)); diff --git a/.github/shared/eslint.config.js b/.github/shared/eslint.config.js new file mode 100644 index 000000000000..d3c33d8d947d --- /dev/null +++ b/.github/shared/eslint.config.js @@ -0,0 +1,5 @@ +import pluginJs from "@eslint/js"; +import globals from "globals"; + +/** @type {import('eslint').Linter.Config[]} */ +export default [{ languageOptions: { globals: globals.node } }, pluginJs.configs.recommended]; diff --git a/.github/shared/package-lock.json 
b/.github/shared/package-lock.json new file mode 100644 index 000000000000..6a2e6b030f91 --- /dev/null +++ b/.github/shared/package-lock.json @@ -0,0 +1,3434 @@ +{ + "name": "@azure-tools/specs-shared", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@azure-tools/specs-shared", + "dependencies": { + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "debug": "^4.4.0", + "js-yaml": "^4.1.0", + "marked": "^16.0.0", + "simple-git": "^3.27.0" + }, + "bin": { + "spec-model": "cmd/spec-model.js" + }, + "devDependencies": { + "@eslint/js": "^9.22.0", + "@tsconfig/node20": "^20.1.4", + "@types/debug": "^4.1.12", + "@types/js-yaml": "^4.0.9", + "@types/node": "^20.0.0", + "@vitest/coverage-v8": "^3.0.7", + "cross-env": "^7.0.3", + "eslint": "^9.22.0", + "globals": "^16.0.0", + "prettier": "~3.5.3", + "semver": "^7.7.1", + "typescript": "~5.8.2", + "vitest": "^3.0.7" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@apidevtools/json-schema-ref-parser": { + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-14.0.3.tgz", + "integrity": "sha512-XtI3vr6mq5ySDV7j+/ya7m9UDkRYN91NeSM5CBjGE8EZHXTuu5duHMm5emG+X8tmjRCYpEkWpHfxHpVR91owVg==", + "dependencies": { + "@types/json-schema": "^7.0.15", + "js-yaml": "^4.1.0" + }, + "engines": { + "node": ">= 20" + }, + "funding": { + "url": "https://github.com/sponsors/philsturgeon" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.7.tgz", + "integrity": "sha512-qnzXzDXdr/po3bOTbTIQZ7+TxNKxpkN5IifVLXS+r7qwynkZfPyjZfE7hCXbo7IoO9TNcSyibgONsf2HauUd3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.7" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/types": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.7.tgz", + "integrity": "sha512-8OLQgDScAOHXnAz2cV+RfzzNMipuLVBz2biuAJFMV9bfkNf393je3VM8CLkjQodW5+iWsSJdSgSWT6rsZoXHPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz", + "integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.5.tgz", + "integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz", + "integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.5.tgz", + "integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz", + "integrity": "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + 
}, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz", + "integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz", + "integrity": "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz", + "integrity": "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz", + "integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz", + "integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz", + "integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz", + "integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz", + "integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz", + "integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz", + "integrity": 
"sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz", + "integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz", + "integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz", + "integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz", + "integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz", + "integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz", + "integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz", + "integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz", + "integrity": "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz", + "integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/win32-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", + "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.0", + "resolved": 
"https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "dev": true, + "dependencies": { + "@eslint/object-schema": "^2.1.6", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", + "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz", + "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": 
"sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "9.30.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.30.1.tgz", + "integrity": "sha512-zXhuECFlyep42KZUhWjfvsmXGX39W8K8LFb8AWXM9gSV9dQB+MrJGLKvW6Zw0Ggnbpw0VHTtrhFXYe3Gym18jg==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", + "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.15.1", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": 
"https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.6", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", + "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.3.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + 
"type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": 
"sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@kwsites/file-exists": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@kwsites/file-exists/-/file-exists-1.1.1.tgz", + "integrity": "sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==", + "license": "MIT", + "dependencies": { + "debug": "^4.1.1" + } + }, + "node_modules/@kwsites/promise-deferred": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz", + "integrity": "sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==", + "license": "MIT" + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.44.1", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.44.1.tgz", + "integrity": "sha512-JAcBr1+fgqx20m7Fwe1DxPUl/hPkee6jA6Pl7n1v2EFiktAHenTaXl5aIFjUIEsfn9w3HE4gK1lEgNGMzBDs1w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.44.1.tgz", + "integrity": "sha512-RurZetXqTu4p+G0ChbnkwBuAtwAbIwJkycw1n6GvlGlBuS4u5qlr5opix8cBAYFJgaY05TWtM+LaoFggUmbZEQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.44.1.tgz", + "integrity": "sha512-fM/xPesi7g2M7chk37LOnmnSTHLG/v2ggWqKj3CCA1rMA4mm5KVBT1fNoswbo1JhPuNNZrVwpTvlCVggv8A2zg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.44.1.tgz", + "integrity": "sha512-gDnWk57urJrkrHQ2WVx9TSVTH7lSlU7E3AFqiko+bgjlh78aJ88/3nycMax52VIVjIm3ObXnDL2H00e/xzoipw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.44.1.tgz", + "integrity": "sha512-wnFQmJ/zPThM5zEGcnDcCJeYJgtSLjh1d//WuHzhf6zT3Md1BvvhJnWoy+HECKu2bMxaIcfWiu3bJgx6z4g2XA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": 
"4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.44.1.tgz", + "integrity": "sha512-uBmIxoJ4493YATvU2c0upGz87f99e3wop7TJgOA/bXMFd2SvKCI7xkxY/5k50bv7J6dw1SXT4MQBQSLn8Bb/Uw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.44.1.tgz", + "integrity": "sha512-n0edDmSHlXFhrlmTK7XBuwKlG5MbS7yleS1cQ9nn4kIeW+dJH+ExqNgQ0RrFRew8Y+0V/x6C5IjsHrJmiHtkxQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.44.1.tgz", + "integrity": "sha512-8WVUPy3FtAsKSpyk21kV52HCxB+me6YkbkFHATzC2Yd3yuqHwy2lbFL4alJOLXKljoRw08Zk8/xEj89cLQ/4Nw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.44.1.tgz", + "integrity": "sha512-yuktAOaeOgorWDeFJggjuCkMGeITfqvPgkIXhDqsfKX8J3jGyxdDZgBV/2kj/2DyPaLiX6bPdjJDTu9RB8lUPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.44.1.tgz", + "integrity": "sha512-W+GBM4ifET1Plw8pdVaecwUgxmiH23CfAUj32u8knq0JPFyK4weRy6H7ooxYFD19YxBulL0Ktsflg5XS7+7u9g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, 
+ "node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.44.1.tgz", + "integrity": "sha512-1zqnUEMWp9WrGVuVak6jWTl4fEtrVKfZY7CvcBmUUpxAJ7WcSowPSAWIKa/0o5mBL/Ij50SIf9tuirGx63Ovew==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.44.1.tgz", + "integrity": "sha512-Rl3JKaRu0LHIx7ExBAAnf0JcOQetQffaw34T8vLlg9b1IhzcBgaIdnvEbbsZq9uZp3uAH+JkHd20Nwn0h9zPjA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.44.1.tgz", + "integrity": "sha512-j5akelU3snyL6K3N/iX7otLBIl347fGwmd95U5gS/7z6T4ftK288jKq3A5lcFKcx7wwzb5rgNvAg3ZbV4BqUSw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.44.1.tgz", + "integrity": "sha512-ppn5llVGgrZw7yxbIm8TTvtj1EoPgYUAbfw0uDjIOzzoqlZlZrLJ/KuiE7uf5EpTpCTrNt1EdtzF0naMm0wGYg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.44.1.tgz", + "integrity": "sha512-Hu6hEdix0oxtUma99jSP7xbvjkUM/ycke/AQQ4EC5g7jNRLLIwjcNwaUy95ZKBJJwg1ZowsclNnjYqzN4zwkAw==", + 
"cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.44.1.tgz", + "integrity": "sha512-EtnsrmZGomz9WxK1bR5079zee3+7a+AdFlghyd6VbAjgRJDbTANJ9dcPIPAi76uG05micpEL+gPGmAKYTschQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.44.1.tgz", + "integrity": "sha512-iAS4p+J1az6Usn0f8xhgL4PaU878KEtutP4hqw52I4IO6AGoyOkHCxcc4bqufv1tQLdDWFx8lR9YlwxKuv3/3g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.44.1.tgz", + "integrity": "sha512-NtSJVKcXwcqozOl+FwI41OH3OApDyLk3kqTJgx8+gp6On9ZEt5mYhIsKNPGuaZr3p9T6NWPKGU/03Vw4CNU9qg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.44.1.tgz", + "integrity": "sha512-JYA3qvCOLXSsnTR3oiyGws1Dm0YTuxAAeaYGVlGpUsHqloPcFjPg+X0Fj2qODGLNwQOAcCiQmHub/V007kiH5A==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.44.1.tgz", + "integrity": 
"sha512-J8o22LuF0kTe7m+8PvW9wk3/bRq5+mRo5Dqo6+vXb7otCm3TPhYOJqOaQtGU9YMWQSL3krMnoOxMr0+9E6F3Ug==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@tsconfig/node20": { + "version": "20.1.6", + "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.6.tgz", + "integrity": "sha512-sz+Hqx9zwZDpZIV871WSbUzSqNIsXzghZydypnfgzPKLltVJfkINfUeTct31n/tTSa9ZE1ZOfKdRre1uHHquYQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/chai": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/js-yaml": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", + "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "license": "MIT" + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.1.tgz", + "integrity": "sha512-jJD50LtlD2dodAEO653i3YF04NWak6jN3ky+Ri3Em3mGR39/glWiboM/IePaRbgwSfqM1TpGXfAg8ohn/4dTgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@vitest/coverage-v8": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "@bcoe/v8-coverage": "^1.0.2", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.17", + "magicast": "^0.3.5", + "std-env": "^3.9.0", + "test-exclude": "^7.0.1", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" + }, + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } + } + }, + "node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": 
"https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + 
"node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || 
^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": 
{ + "node": ">=12" + } + }, + "node_modules/ast-v8-to-istanbul": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.3.tgz", + "integrity": "sha512-MuXMrSLVVoA6sYN/6Hke18vMzrT4TZNbZIj/hvh0fnYFpO+/kFXcLIaiPwXXWaQUPg4yJD8fj+lfJ7/1EBconw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "estree-walker": "^3.0.3", + "js-tokens": "^9.0.1" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chai": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/check-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-env": { + 
"version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", + "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": 
"https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz", + "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.5", + "@esbuild/android-arm": "0.25.5", + "@esbuild/android-arm64": "0.25.5", + "@esbuild/android-x64": "0.25.5", + "@esbuild/darwin-arm64": "0.25.5", + "@esbuild/darwin-x64": "0.25.5", + "@esbuild/freebsd-arm64": "0.25.5", + "@esbuild/freebsd-x64": "0.25.5", + "@esbuild/linux-arm": "0.25.5", + "@esbuild/linux-arm64": "0.25.5", + "@esbuild/linux-ia32": "0.25.5", + "@esbuild/linux-loong64": "0.25.5", + "@esbuild/linux-mips64el": "0.25.5", + "@esbuild/linux-ppc64": "0.25.5", + "@esbuild/linux-riscv64": "0.25.5", + "@esbuild/linux-s390x": "0.25.5", + "@esbuild/linux-x64": "0.25.5", + "@esbuild/netbsd-arm64": "0.25.5", + "@esbuild/netbsd-x64": "0.25.5", + "@esbuild/openbsd-arm64": "0.25.5", + "@esbuild/openbsd-x64": "0.25.5", + 
"@esbuild/sunos-x64": "0.25.5", + "@esbuild/win32-arm64": "0.25.5", + "@esbuild/win32-ia32": "0.25.5", + "@esbuild/win32-x64": "0.25.5" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.30.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.30.1.tgz", + "integrity": "sha512-zmxXPNMOXmwm9E0yQLi5uqXHs7uq2UIiqEKo3Gq+3fwo1XrJ+hijAZImyF7hclW3E6oHz43Yk3RP8at6OTKflQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.0", + "@eslint/core": "^0.14.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.30.1", + "@eslint/plugin-kit": "^0.3.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 
|| ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, 
+ "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expect-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", + "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": 
"sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": 
"^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "16.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.3.0.tgz", + "integrity": "sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": 
"sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", 
+ "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/loupe": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.4.tgz", + "integrity": "sha512-wJzkKwJrheKtknCOKNEtDK4iqg/MxmZheEMtSTYvnzRdEYaZzmgH976nenp8WdJRdx5Vc1X/9MO0Oszl6ezeXg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/magic-string": { + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/magicast": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", + "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.25.4", + "@babel/types": "^7.25.4", + "source-map-js": "^1.2.0" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/marked": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.0.0.tgz", + "integrity": "sha512-MUKMXDjsD/eptB7GPzxo4xcnLS6oo7/RHimUMHEDRhUooPwmN9BEpMl7AEOJv3bmso169wHI2wUF9VQgL7zfmA==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + 
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", + "integrity": 
"sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/rollup": { + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.44.1.tgz", + "integrity": "sha512-x8H8aPvD+xbl0Do8oez5f5o8eMS3trfCghc4HhLAnCkj7Vl0d1JWGs0UF/D886zLW2rOj2QymV/JcSSsw+XDNg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.44.1", + "@rollup/rollup-android-arm64": "4.44.1", + "@rollup/rollup-darwin-arm64": "4.44.1", + "@rollup/rollup-darwin-x64": "4.44.1", + "@rollup/rollup-freebsd-arm64": "4.44.1", + "@rollup/rollup-freebsd-x64": "4.44.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.44.1", + "@rollup/rollup-linux-arm-musleabihf": "4.44.1", + "@rollup/rollup-linux-arm64-gnu": "4.44.1", + "@rollup/rollup-linux-arm64-musl": "4.44.1", + "@rollup/rollup-linux-loongarch64-gnu": "4.44.1", + "@rollup/rollup-linux-powerpc64le-gnu": "4.44.1", + 
"@rollup/rollup-linux-riscv64-gnu": "4.44.1", + "@rollup/rollup-linux-riscv64-musl": "4.44.1", + "@rollup/rollup-linux-s390x-gnu": "4.44.1", + "@rollup/rollup-linux-x64-gnu": "4.44.1", + "@rollup/rollup-linux-x64-musl": "4.44.1", + "@rollup/rollup-win32-arm64-msvc": "4.44.1", + "@rollup/rollup-win32-ia32-msvc": "4.44.1", + "@rollup/rollup-win32-x64-msvc": "4.44.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + 
"dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/simple-git": { + "version": "3.28.0", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.28.0.tgz", + "integrity": "sha512-Rs/vQRwsn1ILH1oBUy8NucJlXmnnLeLCfcvbSehkPzbv3wwoFWIdtfd6Ndo6ZPhlPsCZ60CPI4rxurnwAa+a2w==", + "license": "MIT", + "dependencies": { + "@kwsites/file-exists": "^1.1.1", + "@kwsites/promise-deferred": "^1.1.1", + "debug": "^4.4.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/steveukx/git-js?sponsor=1" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": 
{ + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + 
"url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-literal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz", + "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^10.4.1", + "minimatch": "^9.0.4" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": 
"https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinypool": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": 
"sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/vite": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.0.0.tgz", + "integrity": "sha512-ixXJB1YRgDIw2OszKQS9WxGHKwLdCsbQNkpJN171udl6szi/rIySHL6/Os3s2+oE4P/FLD4dxg4mD7Wust+u5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.6", + "picomatch": "^4.0.2", + "postcss": "^8.5.6", + "rollup": "^4.40.0", + "tinyglobby": "^0.2.14" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { 
+ "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": 
"sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, 
+ "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/.github/shared/package.json b/.github/shared/package.json new file mode 100644 index 000000000000..80a0e954ff06 --- /dev/null +++ b/.github/shared/package.json @@ -0,0 +1,63 @@ +{ + "name": "@azure-tools/specs-shared", + "private": "true", + "type": "module", + "exports": { + "./array": "./src/array.js", + "./breaking-change": "./src/breaking-change.js", + "./changed-files": "./src/changed-files.js", + "./equality": "./src/equality.js", + "./error-reporting": "./src/error-reporting.js", + "./exec": "./src/exec.js", + "./logger": "./src/logger.js", + "./path": "./src/path.js", + "./readme": "./src/readme.js", + "./sdk-types": "./src/sdk-types.js", + "./sleep": "./src/sleep.js", + 
"./sort": "./src/sort.js", + "./spec-model-error": "./src/spec-model-error.js", + "./spec-model": "./src/spec-model.js", + "./swagger": "./src/swagger.js", + "./tag": "./src/tag.js", + "./test/examples": "./test/examples.js" + }, + "bin": { + "spec-model": "./cmd/spec-model.js" + }, + "_comments": { + "dependencies": "Runtime dependencies must be kept to an absolute minimum for performance, ideally with no transitive dependencies", + "dependencies2": "All runtime and dev dependencies in this file, must be a subset of ../package.json" + }, + "dependencies": { + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "debug": "^4.4.0", + "js-yaml": "^4.1.0", + "marked": "^16.0.0", + "simple-git": "^3.27.0" + }, + "devDependencies": { + "@eslint/js": "^9.22.0", + "@tsconfig/node20": "^20.1.4", + "@types/debug": "^4.1.12", + "@types/js-yaml": "^4.0.9", + "@types/node": "^20.0.0", + "@vitest/coverage-v8": "^3.0.7", + "cross-env": "^7.0.3", + "eslint": "^9.22.0", + "globals": "^16.0.0", + "prettier": "~3.5.3", + "semver": "^7.7.1", + "typescript": "~5.8.2", + "vitest": "^3.0.7" + }, + "scripts": { + "lint": "npm run lint:eslint && npm run lint:tsc", + "lint:eslint": "cross-env DEBUG=eslint:eslint eslint", + "lint:tsc": "tsc --build --verbose", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", + "test": "vitest", + "test:ci": "vitest run --coverage --reporter=verbose" + } +} diff --git a/.github/src/README.md b/.github/shared/src/README.md similarity index 100% rename from .github/src/README.md rename to .github/shared/src/README.md diff --git a/.github/shared/src/array.js b/.github/shared/src/array.js new file mode 100644 index 000000000000..1279df5e3858 --- /dev/null +++ b/.github/shared/src/array.js @@ -0,0 +1,33 @@ +// @ts-check + +/** + * @template T + * @param {T[]} array + * @param {(item: T, index: number, array: T[]) => Promise} asyncPredicate + * @returns {Promise} + */ +export async function filterAsync(array, asyncPredicate) { + const results = await mapAsync(array, asyncPredicate); + return array.filter((_, i) => results[i]); +} + +/** + * @template T,U + * @param {T[]} array + * @param {(item: T, index: number, array: T[]) => Promise} asyncMapper + * @returns {Promise} + */ +export async function flatMapAsync(array, asyncMapper) { + const mapped = await mapAsync(array, asyncMapper); + return mapped.flat(); +} + +/** + * @template T,U + * @param {T[]} array + * @param {(item: T, index: number, array: T[]) => Promise} asyncMapper + * @returns {Promise} + */ +export async function mapAsync(array, asyncMapper) { + return Promise.all(array.map(asyncMapper)); +} diff --git a/.github/shared/src/breaking-change.js b/.github/shared/src/breaking-change.js new file mode 100644 index 000000000000..b9bc84dcf521 --- /dev/null +++ b/.github/shared/src/breaking-change.js @@ -0,0 +1,85 @@ +/** + * Breaking change configuration and constants for Azure REST API specs + * This file contains the single source of truth for all breaking change and versioning approval labels. + * + * Used across multiple tools in the Azure/azure-rest-api-specs repository. 
+ */ + +// All versioning approval labels in one place +export const VERSIONING_APPROVALS = { + BENIGN: "Versioning-Approved-Benign", + BUG_FIX: "Versioning-Approved-BugFix", + PRIVATE_PREVIEW: "Versioning-Approved-PrivatePreview", + BRANCH_POLICY_EXCEPTION: "Versioning-Approved-BranchPolicyException", + PREVIOUSLY: "Versioning-Approved-Previously", + RETIRED: "Versioning-Approved-Retired", +}; + +// All breaking change approval labels in one place +export const BREAKING_CHANGE_APPROVALS = { + BENIGN: "BreakingChange-Approved-Benign", + BUG_FIX: "BreakingChange-Approved-BugFix", + USER_IMPACT: "BreakingChange-Approved-UserImpact", + BRANCH_POLICY_EXCEPTION: "BreakingChange-Approved-BranchPolicyException", + PREVIOUSLY: "BreakingChange-Approved-Previously", + SECURITY: "BreakingChange-Approved-Security", +}; + +// Review required labels +export const REVIEW_REQUIRED_LABELS = { + BREAKING_CHANGE: "BreakingChangeReviewRequired", + VERSIONING: "VersioningReviewRequired", +}; + +// Extract values as arrays for validation and configuration +export const versioningApprovalValues = Object.values(VERSIONING_APPROVALS); +export const breakingChangeApprovalValues = Object.values(BREAKING_CHANGE_APPROVALS); +export const reviewRequiredLabelValues = Object.values(REVIEW_REQUIRED_LABELS); + +// Type guard functions for runtime validation +/** + * @param {string} value + */ +export function isValidVersioningApproval(value) { + return versioningApprovalValues.includes(value); +} + +/** + * @param {string} value + */ +export function isValidBreakingChangeApproval(value) { + return breakingChangeApprovalValues.includes(value); +} + +/** + * @param {string} value + */ +export function isReviewRequiredLabel(value) { + return reviewRequiredLabelValues.includes(value); +} + +// Configuration for different check types +export const breakingChangesCheckType = { + SameVersion: { + reviewRequiredLabel: REVIEW_REQUIRED_LABELS.VERSIONING, + approvalPrefixLabel: "Versioning-Approved-*", + 
approvalLabels: versioningApprovalValues, + }, + CrossVersion: { + reviewRequiredLabel: REVIEW_REQUIRED_LABELS.BREAKING_CHANGE, + approvalPrefixLabel: "BreakingChange-Approved-*", + approvalLabels: breakingChangeApprovalValues, + }, +}; + +// Check types +export const BREAKING_CHANGES_CHECK_TYPES = { + SAME_VERSION: "SameVersion", + CROSS_VERSION: "CrossVersion", +}; + +// API version lifecycle stages +export const API_VERSION_LIFECYCLE_STAGES = { + PREVIEW: "preview", + STABLE: "stable", +}; diff --git a/.github/shared/src/changed-files.js b/.github/shared/src/changed-files.js new file mode 100644 index 000000000000..66ac8512e491 --- /dev/null +++ b/.github/shared/src/changed-files.js @@ -0,0 +1,226 @@ +// @ts-check + +import debug from "debug"; +import { simpleGit } from "simple-git"; + +// Enable simple-git debug logging to improve console output +debug.enable("simple-git"); + +/** + * @param {Object} [options] + * @param {string} [options.baseCommitish] Default: "HEAD^". + * @param {string} [options.cwd] Current working directory. Default: process.cwd(). + * @param {string} [options.headCommitish] Default: "HEAD". + * @param {import('./logger.js').ILogger} [options.logger] + * @returns {Promise} List of changed files, using posix paths, relative to options.cwd. Example: ["specification/foo/Microsoft.Foo/main.tsp"]. + */ +export async function getChangedFiles(options = {}) { + const { baseCommitish = "HEAD^", cwd, headCommitish = "HEAD", logger } = options; + + // TODO: If we need to filter based on status, instead of passing an argument to `--diff-filter, + // consider using "--name-status" instead of "--name-only", and return an array of objects like + // { name: "/foo/baz.js", status: Status.Renamed, previousName: "/foo/bar.js"}. + // Then add filter functions to filter based on status. This is more flexible and lets consumers + // filter based on status with a single call to `git diff`. 
+ const result = await simpleGit(cwd).diff(["--name-only", baseCommitish, headCommitish]); + + const files = result.trim().split("\n"); + + logger?.info("Changed Files:"); + for (const file of files) { + logger?.info(` ${file}`); + } + logger?.info(""); + + return files; +} + +/** + * @param {Object} [options] + * @param {string} [options.baseCommitish] Default: "HEAD^". + * @param {string} [options.cwd] Current working directory. Default: process.cwd(). + * @param {string} [options.headCommitish] Default: "HEAD". + * @param {import('./logger.js').ILogger} [options.logger] + * @returns {Promise<{additions: string[], modifications: string[], deletions: string[], renames: {from: string, to: string}[], total: number}>} + */ +export async function getChangedFilesStatuses(options = {}) { + const { baseCommitish = "HEAD^", cwd, headCommitish = "HEAD", logger } = options; + try { + const result = await simpleGit(cwd).diff(["--name-status", baseCommitish, headCommitish]); + + const categorizedFiles = { + additions: /** @type {string[]} */ ([]), + modifications: /** @type {string[]} */ ([]), + deletions: /** @type {string[]} */ ([]), + renames: /** @type {{from: string, to: string}[]} */ ([]), + total: 0, + }; + + if (result.trim()) { + const lines = result.trim().split("\n"); + + for (const line of lines) { + const parts = line.split("\t"); + const status = parts[0]; + + switch (status[0]) { + case "A": + categorizedFiles.additions.push(parts[1]); + break; + case "M": + categorizedFiles.modifications.push(parts[1]); + break; + case "D": + categorizedFiles.deletions.push(parts[1]); + break; + case "R": + categorizedFiles.renames.push({ + from: parts[1], + to: parts[2], + }); + break; + case "C": + categorizedFiles.additions.push(parts[2]); + break; + default: + categorizedFiles.modifications.push(parts[1]); + } + } + + categorizedFiles.total = + categorizedFiles.additions.length + + categorizedFiles.modifications.length + + categorizedFiles.deletions.length + + 
categorizedFiles.renames.length; + } + + // Log all changed files by categories + if (logger) { + logger.info("Categorized Changed Files:"); + + if (categorizedFiles.additions.length > 0) { + logger.info(` Additions (${categorizedFiles.additions.length}):`); + for (const file of categorizedFiles.additions) { + logger.info(` + ${file}`); + } + } + + if (categorizedFiles.modifications.length > 0) { + logger.info(` Modifications (${categorizedFiles.modifications.length}):`); + for (const file of categorizedFiles.modifications) { + logger.info(` M ${file}`); + } + } + + if (categorizedFiles.deletions.length > 0) { + logger.info(` Deletions (${categorizedFiles.deletions.length}):`); + for (const file of categorizedFiles.deletions) { + logger.info(` - ${file}`); + } + } + + if (categorizedFiles.renames.length > 0) { + logger.info(` Renames (${categorizedFiles.renames.length}):`); + for (const rename of categorizedFiles.renames) { + logger.info(` R ${rename.from} -> ${rename.to}`); + } + } + + logger.info(` Total: ${categorizedFiles.total} files`); + logger.info(""); + } + + return categorizedFiles; + } catch (error) { + logger?.error(`Error getting categorized changed files: ${error}`); + return { + additions: /** @type {string[]} */ ([]), + modifications: /** @type {string[]} */ ([]), + deletions: /** @type {string[]} */ ([]), + renames: /** @type {{from: string, to: string}[]} */ ([]), + total: 0, + }; + } +} + +// Functions suitable for passing to string[].filter(), ordered roughly in order of increasing specificity + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function json(file) { + // Extension "json" with any case is a valid JSON file + return typeof file === "string" && file.toLowerCase().endsWith(".json"); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function readme(file) { + // Filename "readme.md" with any case is a valid README file + return typeof file === "string" && 
file.toLowerCase().endsWith("readme.md"); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function specification(file) { + // Folder name "specification" should match case, since it already exists in repo + return typeof file === "string" && file.startsWith("specification/"); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function dataPlane(file) { + // Folder name "data-plane" should match case for consistency across specs + return typeof file === "string" && specification(file) && file.includes("/data-plane/"); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function resourceManager(file) { + // Folder name "resource-manager" should match case for consistency across specs + return typeof file === "string" && specification(file) && file.includes("/resource-manager/"); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function example(file) { + // Folder name "examples" should match case for consistency across specs + return ( + typeof file === "string" && json(file) && specification(file) && file.includes("/examples/") + ); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function swagger(file) { + return ( + typeof file === "string" && + json(file) && + (dataPlane(file) || resourceManager(file)) && + !example(file) && + !scenario(file) + ); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +export function scenario(file) { + return ( + typeof file === "string" && json(file) && specification(file) && file.includes("/scenarios/") + ); +} diff --git a/.github/src/equality.js b/.github/shared/src/equality.js similarity index 100% rename from .github/src/equality.js rename to .github/shared/src/equality.js diff --git a/.github/shared/src/error-reporting.js b/.github/shared/src/error-reporting.js new file mode 100644 index 000000000000..71a71a9eab9d --- /dev/null +++ b/.github/shared/src/error-reporting.js @@ -0,0 +1,33 @@ +// 
@ts-check +import * as fs from "fs"; + +/** + * Set the summary of the github step summary for a job. This feature is intended for formatted markdown, + * which can be used to display the results of a job in a more readable format. + * + * Format your results as a markdown table and go to town! + * @param {string} content + * @returns {void} + */ +export function setSummary(content) { + if (!process.env.GITHUB_STEP_SUMMARY) { + console.log("GITHUB_STEP_SUMMARY is not set. Skipping summary update."); + return; + } + const summaryFile = process.env.GITHUB_STEP_SUMMARY; + + fs.writeFileSync(summaryFile, content); +} + +/** + * This function is used to ask the github agent to annotate a file in a github PR with an error message. + * @param {string} repoPath + * @param {string} message + * @param {number} line + * @param {number} col + * @returns {void} + */ +export function annotateFileError(repoPath, message, line, col) { + const errorLine = `::error file=${repoPath},line=${line},col=${col}::${message}`; + console.log(errorLine); +} diff --git a/.github/shared/src/exec.js b/.github/shared/src/exec.js new file mode 100644 index 000000000000..d465d9b2dce4 --- /dev/null +++ b/.github/shared/src/exec.js @@ -0,0 +1,123 @@ +// @ts-check + +import child_process from "child_process"; +import { dirname, join } from "path"; +import { promisify } from "util"; +const execFileImpl = promisify(child_process.execFile); + +/** + * @typedef {Object} ExecOptions + * @property {string} [cwd] Current working directory. Default: process.cwd(). + * @property {import('./logger.js').ILogger} [logger] + * @property {number} [maxBuffer] Max bytes allowed on stdout or stderr. Default: 16 * 1024 * 1024. + */ + +/** + * @typedef {Object} ExecResult + * @property {string} stdout + * @property {string} stderr + */ + +/** + * @typedef {Error & { stdout?: string, stderr?: string, code?: number }} ExecError + */ + +/** + * Checks whether an unknown error object is an ExecError. 
+ * @param {unknown} error + * @returns {error is ExecError} + */ +export function isExecError(error) { + if (!(error instanceof Error)) return false; + + const e = /** @type {ExecError} */ (error); + return typeof e.stdout === "string" || typeof e.stderr === "string"; +} + +/** + * Wraps `child_process.execFile()`, adding logging and a larger default maxBuffer. + * + * @param {string} file + * @param {string[]} args + * @param {ExecOptions} [options] + * @returns {Promise} + * @throws {ExecError} + */ +export async function execFile(file, args, options = {}) { + const { + cwd, + logger, + // Node default is 1024 * 1024, which is too small for some git commands returning many entities or large file content. + // To support "git show", should be larger than the largest swagger file in the repo (2.5 MB as of 2/28/2025). + maxBuffer = 16 * 1024 * 1024, + } = options; + + logger?.info(`execFile("${file}", ${JSON.stringify(args)})`); + + try { + // execFile(file, args) is more secure than exec(cmd), since the latter is vulnerable to shell injection + const result = await execFileImpl(file, args, { + cwd, + maxBuffer, + }); + + logger?.debug(`stdout: '${result.stdout}'`); + logger?.debug(`stderr: '${result.stderr}'`); + + return result; + } catch (error) { + /* v8 ignore next */ + logger?.debug(`error: '${JSON.stringify(error)}'`); + + throw error; + } +} + +/** + * Calls `execFile()` with appropriate arguments to run `npm` on all platforms + * + * @param {string[]} args + * @param {ExecOptions} [options] + * @returns {Promise} + * @throws {ExecError} + */ +export async function execNpm(args, options = {}) { + // Exclude platform-specific code from coverage + /* v8 ignore start */ + const { file, defaultArgs } = + process.platform === "win32" + ? { + // Only way I could find to run "npm" on Windows, without using the shell (e.g. 
"cmd /c npm ...") + // + // "node.exe", ["--", "npm-cli.js", ...args] + // + // The "--" MUST come BEFORE "npm-cli.js", to ensure args are sent to the script unchanged. + // If the "--" comes after "npm-cli.js", the args sent to the script will be ["--", ...args], + // which is NOT equivalent, and can break if args itself contains another "--". + + // example: "C:\Program Files\nodejs\node.exe" + file: process.execPath, + + // example: "C:\Program Files\nodejs\node_modules\npm\bin\npm-cli.js" + defaultArgs: [ + "--", + join(dirname(process.execPath), "node_modules", "npm", "bin", "npm-cli.js"), + ], + } + : { file: "npm", defaultArgs: [] }; + /* v8 ignore stop */ + + return await execFile(file, [...defaultArgs, ...args], options); +} + +/** + * Calls `execNpm()` with arguments ["exec", "--no", "--"] prepended. + * + * @param {string[]} args + * @param {ExecOptions} [options] + * @returns {Promise} + * @throws {ExecError} + */ +export async function execNpmExec(args, options = {}) { + return await execNpm(["exec", "--no", "--", ...args], options); +} diff --git a/.github/shared/src/logger.js b/.github/shared/src/logger.js new file mode 100644 index 000000000000..8ccccd1ace86 --- /dev/null +++ b/.github/shared/src/logger.js @@ -0,0 +1,58 @@ +// @ts-check + +/** + * @typedef {Object} ILogger + * @property {(message:string) => void} debug + * @property {(message:string) => void} error + * @property {(message:string) => void} info + * @property {() => boolean} isDebug + */ + +/** + * @implements {ILogger} + */ +export class ConsoleLogger { + /** @type {boolean} */ + #isDebug; + + /** + * @param {boolean} [isDebug] - If true, debug logs will be printed. Default: false. 
+ */ + constructor(isDebug = false) { + this.#isDebug = isDebug; + } + + /** + * @param {string} message + */ + debug(message) { + if (this.isDebug()) { + console.debug(message); + } + } + + /** + * @param {string} message + */ + error(message) { + console.error(message); + } + + /** + * @param {string} message + */ + info(message) { + console.log(message); + } + + /** + * @returns {boolean} + */ + isDebug() { + return this.#isDebug; + } +} + +// Singleton loggers +export const defaultLogger = new ConsoleLogger(); +export const debugLogger = new ConsoleLogger(/*isDebug*/ true); diff --git a/.github/shared/src/path.js b/.github/shared/src/path.js new file mode 100644 index 000000000000..35382461bfad --- /dev/null +++ b/.github/shared/src/path.js @@ -0,0 +1,13 @@ +// @ts-check + +import { resolve, sep } from "path"; + +/** + * + * @param {string} path + * @param {string} folder + * @returns {boolean} True if path contains the named folder + */ +export function includesFolder(path, folder) { + return resolve(path).includes(sep + folder + sep); +} diff --git a/.github/shared/src/readme.js b/.github/shared/src/readme.js new file mode 100644 index 000000000000..f6881719bd94 --- /dev/null +++ b/.github/shared/src/readme.js @@ -0,0 +1,231 @@ +// @ts-check + +import { readFile } from "fs/promises"; +import yaml from "js-yaml"; +import { marked } from "marked"; +import { dirname, normalize, relative, resolve } from "path"; +import { mapAsync } from "./array.js"; +import { Tag } from "./tag.js"; + +/** + * @typedef {import('./spec-model.js').SpecModel} SpecModel + * @typedef {import('./spec-model.js').ToJSONOptions} ToJSONOptions + */ + +/** + * Regex to match tag names in readme.md yaml code blocks + */ +export const TagMatchRegex = /yaml.*\$\(tag\) ?== ?(["'])(.*?)\1/; + +export class Readme { + /** + * Content of `readme.md`, either loaded from `#path` or passed in via `options`. + * + * Reset to `undefined` after `#data` is loaded to save memory. 
+ * + * @type {string | undefined} + */ + #content; + + /** @type {{globalConfig: Object, tags: Map} | undefined} */ + #data; + + /** @type {import('./logger.js').ILogger | undefined} */ + #logger; + + /** + * absolute path + * @type {string} + * */ + #path; + + /** + * SpecModel that contains this Readme + * @type {SpecModel | undefined} + */ + #specModel; + + /** + * @param {string} path Used for content, unless options.content is specified + * @param {Object} [options] + * @param {string} [options.content] If specified, is used instead of reading path from disk + * @param {import('./logger.js').ILogger} [options.logger] + * @param {SpecModel} [options.specModel] + */ + constructor(path, options) { + this.#path = resolve(options?.specModel?.folder ?? "", path); + + this.#content = options?.content; + this.#logger = options?.logger; + this.#specModel = options?.specModel; + } + + /** + * @param {string} swaggerPath + * @param {import('./logger.js').ILogger} [logger] + * @returns {string} + */ + static #normalizeSwaggerPath(swaggerPath, logger) { + let swaggerPathNormalized = swaggerPath; + // Ignore uses of "$(this-folder)" in the swagger path. It refers to the + // current folder anyway and can be substituted with "." + if (swaggerPath.includes("$(this-folder)")) { + swaggerPathNormalized = swaggerPath.replaceAll("$(this-folder)", "."); + } + + // Some swagger paths contain backslashes. These should be normalized when + // encountered though the expected format for input-files is forward slashes. + if (swaggerPathNormalized.includes("\\")) { + /* v8 ignore next */ + logger?.info( + `Found backslash (\\) in swagger path ${swaggerPath}. 
Replacing with forward slash (/)`, + ); + + swaggerPathNormalized = swaggerPathNormalized.replaceAll("\\", "/"); + } + + return normalize(swaggerPathNormalized); + } + + async #getData() { + if (!this.#data) { + // Only read file if #content is exactly undefined, to allow setting #content to empty string + // to simulate an empty file + if (this.#content === undefined) { + this.#content = await readFile(this.#path, { + encoding: "utf8", + }); + } + + const tokens = marked.lexer(this.#content); + + /** @type import("marked").Tokens.Code[] */ + const yamlBlocks = tokens + .filter((token) => token.type === "code") + .map((token) => /** @type import("marked").Tokens.Code */ (token)) + // Include default block and tagged blocks (```yaml $(tag) == 'package-2021-11-01') + .filter((token) => token.lang?.toLowerCase().startsWith("yaml")); + + const globalConfigYamlBlocks = yamlBlocks.filter((token) => token.lang === "yaml"); + + const globalConfig = globalConfigYamlBlocks.reduce( + (obj, token) => Object.assign(obj, yaml.load(token.text, { schema: yaml.FAILSAFE_SCHEMA })), + {}, + ); + + /** @type {Map} */ + const tags = new Map(); + for (const block of yamlBlocks) { + const tagName = block.lang?.match(TagMatchRegex)?.[2] || "default"; + + if (tagName === "default" || tagName === "all-api-versions") { + // Skip yaml blocks where this is no tag or tag is all-api-versions + continue; + } + + const obj = /** @type {any} */ (yaml.load(block.text, { schema: yaml.FAILSAFE_SCHEMA })); + + if (!obj) { + this.#logger?.debug(`No yaml object found for tag ${tagName} in ${this.#path}`); + continue; + } + + if (!obj["input-file"]) { + // The yaml block does not contain an input-file key + continue; + } + + // This heuristic assumes that a previous definition of the tag with no + // swaggers means that the previous definition did not have an input-file + // key. It's possible that the previous defintion had an `input-file: []` + // or something like it. 
+ const existingTag = tags.get(tagName); + if ((existingTag?.inputFiles?.size ?? 0) > 0) { + // The tag already exists and has a swagger file. This is an error as + // there should only be one definition of input-files per tag. + const message = `Multiple input-file definitions for tag ${tagName} in ${this.#path}`; + this.#logger?.error(message); + throw new Error(message); + } + + // It's possible for input-file to be a string or an array + const inputFilePaths = Array.isArray(obj["input-file"]) + ? obj["input-file"] + : [obj["input-file"]]; + + const swaggerPathsResolved = inputFilePaths + .map((p) => Readme.#normalizeSwaggerPath(p)) + .map((p) => resolve(dirname(this.#path), p)); + + const tag = new Tag(tagName, swaggerPathsResolved, { + logger: this.#logger, + readme: this, + }); + + tags.set(tag.name, tag); + } + + this.#data = { globalConfig, tags }; + + // Clear #content to save memory, since it's no longer needed after #data is loaded + this.#content = undefined; + } + + return this.#data; + } + + /** + * @returns {Promise} + */ + async getGlobalConfig() { + return (await this.#getData()).globalConfig; + } + + /** + * @returns {Promise>} + */ + async getTags() { + return (await this.#getData()).tags; + } + + /** + * @returns {string} absolute path + */ + get path() { + return this.#path; + } + + /** + * @returns {SpecModel | undefined} SpecModel that contains this Readme + */ + get specModel() { + return this.#specModel; + } + + /** + * @param {ToJSONOptions} [options] + * @returns {Promise} + */ + async toJSONAsync(options) { + const tags = await mapAsync( + [...(await this.getTags()).values()].sort((a, b) => a.name.localeCompare(b.name)), + async (t) => await t.toJSONAsync(options), + ); + + return { + path: + options?.relativePaths && this.#specModel + ? 
relative(this.#specModel.folder, this.#path) + : this.#path, + globalConfig: await this.getGlobalConfig(), + tags, + }; + } + + /** + * @returns {string} + */ + toString() { + return `Readme(${this.#path}, {logger: ${this.#logger}})`; + } +} diff --git a/.github/src/sdk-types.js b/.github/shared/src/sdk-types.js similarity index 77% rename from .github/src/sdk-types.js rename to .github/shared/src/sdk-types.js index 1d3b057b48a4..ddf02356573f 100644 --- a/.github/src/sdk-types.js +++ b/.github/shared/src/sdk-types.js @@ -1,3 +1,5 @@ +/* v8 ignore start */ + // @ts-check /** @@ -5,21 +7,28 @@ */ /** - * SDK labels mapping for breaking change labels - * @type {Object.} + * }} SdkLabelInfo + */ + +/** + * @typedef {Record} SdkLabels */ + +/** + * SDK labels mapping for breaking change labels + * @type {SdkLabels} + * */ export const sdkLabels = { "azure-sdk-for-go": { breakingChange: "BreakingChange-Go-Sdk", breakingChangeApproved: "BreakingChange-Go-Sdk-Approved", breakingChangeSuppression: "BreakingChange-Go-Sdk-Suppression", - breakingChangeSuppressionApproved: - "BreakingChange-Go-Sdk-Suppression-Approved", + breakingChangeSuppressionApproved: "BreakingChange-Go-Sdk-Suppression-Approved", }, "azure-sdk-for-java": { breakingChange: undefined, @@ -31,8 +40,7 @@ export const sdkLabels = { breakingChange: "BreakingChange-JavaScript-Sdk", breakingChangeApproved: "BreakingChange-JavaScript-Sdk-Approved", breakingChangeSuppression: "BreakingChange-JavaScript-Sdk-Suppression", - breakingChangeSuppressionApproved: - "BreakingChange-JavaScript-Sdk-Suppression-Approved", + breakingChangeSuppressionApproved: "BreakingChange-JavaScript-Sdk-Suppression-Approved", }, "azure-sdk-for-net": { breakingChange: undefined, @@ -44,7 +52,6 @@ export const sdkLabels = { breakingChange: "BreakingChange-Python-Sdk", breakingChangeApproved: "BreakingChange-Python-Sdk-Approved", breakingChangeSuppression: "BreakingChange-Python-Sdk-Suppression", - breakingChangeSuppressionApproved: - 
"BreakingChange-Python-Sdk-Suppression-Approved", + breakingChangeSuppressionApproved: "BreakingChange-Python-Sdk-Suppression-Approved", }, }; diff --git a/.github/shared/src/sleep.js b/.github/shared/src/sleep.js new file mode 100644 index 000000000000..a477489eedfc --- /dev/null +++ b/.github/shared/src/sleep.js @@ -0,0 +1,9 @@ +// @ts-check + +/** + * @param {number} ms Number of milliseconds to sleep + * @returns {Promise} + */ +export async function sleep(ms) { + await new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/.github/shared/src/sort.js b/.github/shared/src/sort.js new file mode 100644 index 000000000000..befce40a0147 --- /dev/null +++ b/.github/shared/src/sort.js @@ -0,0 +1,45 @@ +// @ts-check + +/** + * Returns a comparator that compares values by a date string in ascending order. + * Throws if the value returned by getDate() is null, undefined, or cannot be + * parsed as a date. + * + * @template T + * @param {(item: T) => string} getDate + * @returns {(a: T, b: T) => number} + */ +export function byDate(getDate) { + return (a, b) => { + // Sort ascending to match JS default + return parseDate(getDate(a)) - parseDate(getDate(b)); + }; +} + +/** + * Parses a string to a date, throwing if null, undefined, or cannot be parsed. + * + * @param {string} s + * @returns {number} + */ +function parseDate(s) { + // Date.parse() returns NaN for null, undefined, or strings that cannot be parsed. + const parsed = Date.parse(s); + + if (Number.isNaN(parsed)) { + throw new Error(`Unable to parse '${s}' to a valid date`); + } + + return parsed; +} + +/** + * Inverts a comparator function. 
+ * + * @template T + * @param {(a: T, b: T) => number} comparator + * @returns {(a: T, b: T) => number} + */ +export function invert(comparator) { + return (a, b) => -comparator(a, b); +} diff --git a/.github/shared/src/spec-model-error.js b/.github/shared/src/spec-model-error.js new file mode 100644 index 000000000000..19abbdb0903a --- /dev/null +++ b/.github/shared/src/spec-model-error.js @@ -0,0 +1,49 @@ +// @ts-check + +export class SpecModelError extends Error { + /** + * Path to file that caused the error + * + * @type {string|undefined} + */ + source; + + /** + * Path to readme that caused the error (if known) + * @type {string|undefined} + */ + readme; + + /** + * Name of tag that caused the error (if known) + * @type {string|undefined} + */ + tag; + + /** + * @param {string} message + * @param {Object} [options] + * @param {Error} [options.cause] + * @param {string} [options.source] Path to file that caused the error + * @param {string} [options.readme] Path to readme that caused the error (if known) + * @param {string} [options.tag] Name of tag that caused the error (if known) + */ + constructor(message, options) { + super(message, { cause: options?.cause }); + + this.name = this.constructor.name; + + this.source = options?.source; + this.readme = options?.readme; + this.tag = options?.tag; + } + + toString() { + return ( + `SpecModelError: ${this.message}` + + `${this.source ? `\n\tProblem File: ${this.source}` : ""}` + + `${this.readme ? `\n\tReadme: ${this.readme}` : ""}` + + `${this.tag ? 
`\n\tTag: ${this.tag}` : ""}` + ); + } +} diff --git a/.github/shared/src/spec-model.js b/.github/shared/src/spec-model.js new file mode 100644 index 000000000000..6119777f03d5 --- /dev/null +++ b/.github/shared/src/spec-model.js @@ -0,0 +1,219 @@ +// @ts-check + +import { readdir } from "fs/promises"; +import { resolve } from "path"; +import { mapAsync } from "./array.js"; +import { Readme } from "./readme.js"; + +/** + * @typedef {Object} ToJSONOptions + * @prop {boolean} [includeRefs] + * @prop {boolean} [relativePaths] + * + * @typedef {import('./swagger.js').Swagger} Swagger + * @typedef {import('./tag.js').Tag} Tag + */ + +export class SpecModel { + /** @type {string} absolute path */ + #folder; + + /** @type {import('./logger.js').ILogger | undefined} */ + #logger; + + /** @type {Map | undefined} */ + #readmes; + + /** + * @param {string} folder + * @param {Object} [options] + * @param {import('./logger.js').ILogger} [options.logger] + */ + constructor(folder, options) { + this.#folder = resolve(folder); + this.#logger = options?.logger; + } + + /** + * @returns {string} absolute path + */ + get folder() { + return this.#folder; + } + + /** + * Given a swagger file, return all the tags inside readme files that reference the file (directly or indirectly). + * @param {string} swaggerPath + * @returns {Promise>>} map of readme paths to (map of tag names to Tag objects) + */ + async getAffectedReadmeTags(swaggerPath) { + const swaggerPathResolved = resolve(swaggerPath); + + /** @type {Map>} */ + const affectedReadmeTags = new Map(); + + for (const readme of (await this.getReadmes()).values()) { + for (const tag of (await readme.getTags()).values()) { + for (const inputFile of tag.inputFiles.values()) { + if (inputFile.path === swaggerPathResolved) { + /** @type {Map} */ + const tags = affectedReadmeTags.get(readme.path) ?? 
new Map(); + tags.set(tag.name, tag); + affectedReadmeTags.set(readme.path, tags); + + // No need to check refs if the swagger file is directly referenced + continue; + } + + const refs = await inputFile.getRefs(); + if (refs.get(swaggerPathResolved)) { + /** @type {Map} */ + const tags = affectedReadmeTags.get(readme.path) ?? new Map(); + tags.set(tag.name, tag); + affectedReadmeTags.set(readme.path, tags); + } + } + } + } + + return affectedReadmeTags; + } + + /** + * Given a swagger file, return the swagger files that are affected by the + * changes in the given swagger file. + * @param {string} swaggerPath + * @returns {Promise>} map of swagger paths to Swagger objects + */ + async getAffectedSwaggers(swaggerPath) { + const swaggerPathResolved = resolve(swaggerPath); + + /** @type {Map} */ + const affectedSwaggers = new Map(); + + for (const readme of (await this.getReadmes()).values()) { + for (const tag of (await readme.getTags()).values()) { + for (const swagger of tag.inputFiles.values()) { + // readme.md includes swaggerPath + if (swagger.path === swaggerPathResolved) { + affectedSwaggers.set(swagger.path, swagger); + } + + const refs = await swagger.getRefs(); + + // readme.md includes a.json + // a.json references swaggerPath + const refToSwaggerPath = refs.get(swaggerPathResolved); + if (refToSwaggerPath) { + // Add the Swagger object for swaggerPath + affectedSwaggers.set(refToSwaggerPath.path, refToSwaggerPath); + + // Add the Swagger object that references swaggerPath + // + // Example: a.json + affectedSwaggers.set(swagger.path, swagger); + } + + // readme.md includes a.json + // a.json references b.json + // b.json references swaggerPath + for (const ref of refs.values()) { + const refRefs = await ref.getRefs(); + const refRefToSwaggerPath = refRefs.get(swaggerPathResolved); + if (refRefToSwaggerPath) { + // Add the Swagger object for swaggerPath + affectedSwaggers.set(refRefToSwaggerPath.path, refRefToSwaggerPath); + + // Add the Swagger object 
that references swaggerPath + // + // Example: b.json + affectedSwaggers.set(ref.path, ref); + + // Add the Swagger object that references the Swagger object + // that references swaggerPath + // + // Example: a.json + // + // Note: This may not be strictly necessary, since getRefs() includes + // transitive references, so "a.json" should have already been added + // above. However, it's safer to add it, in case somehow it wasn't added + // earlier, since we know it's in the dependency chain. + affectedSwaggers.set(swagger.path, swagger); + } + } + } + } + } + + // The swagger file supplied does not exist in the given specModel + if (affectedSwaggers.size === 0) { + throw new Error(`No affected swaggers found in specModel for ${swaggerPath}`); + } + + return affectedSwaggers; + } + + /** + * @returns {Promise>} map of readme paths to readme Objects + */ + async getReadmes() { + if (!this.#readmes) { + const files = await readdir(this.#folder, { + recursive: true, + }); + + const readmePaths = files + // filter before resolve to (slightly) improve perf, since filter only needs filename + .filter(readme) + .map((p) => resolve(this.#folder, p)); + + this.#logger?.debug(`Found ${readmePaths.length} readme files`); + + this.#readmes = new Map( + readmePaths.map((p) => { + const readme = new Readme(p, { + logger: this.#logger, + specModel: this, + }); + return [readme.path, readme]; + }), + ); + } + + return this.#readmes; + } + + /** + * @param {ToJSONOptions} [options] + * @returns {Promise} + */ + async toJSONAsync(options) { + const readmes = await mapAsync( + [...(await this.getReadmes()).values()].sort((a, b) => a.path.localeCompare(b.path)), + async (r) => await r.toJSONAsync(options), + ); + + return { + folder: this.#folder, + readmes, + }; + } + + /** + * @returns {string} + */ + toString() { + return `SpecModel(${this.#folder}, {logger: ${this.#logger}}})`; + } +} + +// TODO: Remove duplication with changed-files.js (which currently requires paths relative to 
repo root) + +/** + * @param {string} [file] + * @returns {boolean} + */ +function readme(file) { + // Filename "readme.md" with any case is a valid README file + return typeof file === "string" && file.toLowerCase().endsWith("readme.md"); +} diff --git a/.github/shared/src/swagger.js b/.github/shared/src/swagger.js new file mode 100644 index 000000000000..6427bc64f74d --- /dev/null +++ b/.github/shared/src/swagger.js @@ -0,0 +1,179 @@ +// @ts-check + +import $RefParser, { ResolverError } from "@apidevtools/json-schema-ref-parser"; +import { readFile } from "fs/promises"; +import { dirname, relative, resolve } from "path"; +import { mapAsync } from "./array.js"; +import { includesFolder } from "./path.js"; +import { SpecModelError } from "./spec-model-error.js"; + +/** + * @typedef {import('./spec-model.js').Tag} Tag + * @typedef {import('./spec-model.js').ToJSONOptions} ToJSONOptions + */ + +/** + * @type {import('@apidevtools/json-schema-ref-parser').ResolverOptions} + */ +const excludeExamples = { + order: 1, + canRead: true, + read: async ( + /** @type import('@apidevtools/json-schema-ref-parser').FileInfo */ + file, + ) => { + if (example(file.url)) { + return ""; + } + return await readFile(file.url, { encoding: "utf8" }); + }, +}; + +export class Swagger { + /** @type {import('./logger.js').ILogger | undefined} */ + #logger; + + /** @type {string} absolute path */ + #path; + + /** @type {Map | undefined} */ + #refs; + + /** @type {Tag | undefined} Tag that contains this Swagger */ + #tag; + + /** + * @param {string} path + * @param {Object} [options] + * @param {import('./logger.js').ILogger} [options.logger] + * @param {Tag} [options.tag] + */ + constructor(path, options) { + const rootDir = dirname(options?.tag?.readme?.path ?? 
""); + this.#path = resolve(rootDir, path); + this.#logger = options?.logger; + this.#tag = options?.tag; + } + + /** + * @returns {Promise>} + */ + async getRefs() { + const allRefs = await this.#getRefs(); + + // filter out any paths that are examples + const filtered = new Map([...allRefs].filter(([path]) => !example(path))); + + return filtered; + } + + async #getRefs() { + if (!this.#refs) { + let schema; + try { + schema = await $RefParser.resolve(this.#path, { + resolve: { file: excludeExamples, http: false }, + }); + } catch (error) { + if (error instanceof ResolverError) { + throw new SpecModelError(`Failed to resolve file for swagger: ${this.#path}`, { + cause: error, + source: error.source, + tag: this.#tag?.name, + readme: this.#tag?.readme?.path, + }); + } + + throw error; + } + + const refPaths = schema + .paths("file") + // Exclude ourself + .filter((p) => resolve(p) !== resolve(this.#path)); + + this.#refs = new Map( + refPaths.map((p) => { + const swagger = new Swagger(p, { + logger: this.#logger, + tag: this.#tag, + }); + return [swagger.path, swagger]; + }), + ); + } + + return this.#refs; + } + + /** + * @returns {Promise>} + */ + async getExamples() { + const allRefs = await this.#getRefs(); + + // filter out any paths that are examples + const filtered = new Map([...allRefs].filter(([path]) => example(path))); + + return filtered; + } + + /** + * @returns {string} absolute path + */ + get path() { + return this.#path; + } + + /** + * @returns {Tag | undefined} Tag that contains this Swagger + */ + get tag() { + return this.#tag; + } + + /** + * @param {ToJSONOptions} [options] + * @returns {Promise} + */ + async toJSONAsync(options) { + return { + path: + options?.relativePaths && this.#tag?.readme?.specModel + ? relative(this.#tag?.readme?.specModel.folder, this.#path) + : this.#path, + refs: options?.includeRefs + ? 
await mapAsync( + [...(await this.getRefs()).values()].sort((a, b) => a.path.localeCompare(b.path)), + async (s) => + // Do not include swagger refs transitively, otherwise we could get in infinite loop + await s.toJSONAsync({ ...options, includeRefs: false }), + ) + : undefined, + }; + } + + toString() { + return `Swagger(${this.#path}, {logger: ${this.#logger}})`; + } +} + +// TODO: Remove duplication with changed-files.js (which currently requires paths relative to repo root) + +/** + * @param {string} [file] + * @returns {boolean} + */ +function example(file) { + // Folder name "examples" should match case for consistency across specs + return typeof file === "string" && json(file) && includesFolder(file, "examples"); +} + +/** + * @param {string} [file] + * @returns {boolean} + */ +function json(file) { + // Extension "json" with any case is a valid JSON file + return typeof file === "string" && file.toLowerCase().endsWith(".json"); +} diff --git a/.github/shared/src/tag.js b/.github/shared/src/tag.js new file mode 100644 index 000000000000..2f3a29d187f0 --- /dev/null +++ b/.github/shared/src/tag.js @@ -0,0 +1,85 @@ +// @ts-check + +import { mapAsync } from "./array.js"; +import { Swagger } from "./swagger.js"; + +/** + * @typedef {import('./readme.js').Readme} Readme + * @typedef {import('./spec-model.js').ToJSONOptions} ToJSONOptions + */ + +export class Tag { + /** @type {Map} */ + #inputFiles; + + /** @type {import('./logger.js').ILogger | undefined} */ + #logger; + + /** @type {string} */ + #name; + + /** + * Readme that contains this Tag + * @type {Readme | undefined} + */ + #readme; + + /** + * @param {string} name + * @param {string[]} inputFilePaths + * @param {Object} [options] + * @param {import('./logger.js').ILogger} [options.logger] + * @param {Readme} [options.readme] + */ + constructor(name, inputFilePaths, options) { + this.#name = name; + this.#logger = options?.logger; + this.#readme = options?.readme; + + this.#inputFiles = new Map( + 
inputFilePaths.map((p) => { + let swagger = new Swagger(p, { logger: this.#logger, tag: this }); + return [swagger.path, swagger]; + }), + ); + } + + /** + * @returns {Map} + */ + get inputFiles() { + return this.#inputFiles; + } + + /** + * @returns {string} + */ + get name() { + return this.#name; + } + + /** + * @returns {Readme | undefined} Readme that contains this Tag + */ + get readme() { + return this.#readme; + } + + /** + * @param {ToJSONOptions} [options] + * @returns {Promise} + */ + async toJSONAsync(options) { + return { + name: this.#name, + inputFiles: await mapAsync( + [...this.#inputFiles.values()].sort((a, b) => a.path.localeCompare(b.path)), + async (s) => await s.toJSONAsync(options), + ), + }; + } + + toString() { + return `Tag(${this.#name}, {logger: ${this.#logger}})`; + } +} diff --git a/.github/shared/test/array.test.js b/.github/shared/test/array.test.js new file mode 100644 index 000000000000..007449622882 --- /dev/null +++ b/.github/shared/test/array.test.js @@ -0,0 +1,40 @@ +// @ts-check + +import { describe, expect, it } from "vitest"; +import { filterAsync, flatMapAsync, mapAsync } from "../src/array.js"; +import { sleep } from "../src/sleep.js"; + +describe("array", () => { + it("filterAsync", async () => { + const input = [1, 2, 3]; + + const result = await filterAsync(input, async (item, index) => { + await sleep(index); + return item === 1 || index === 1; + }); + + expect(result).toEqual([1, 2]); + }); + + it("flatMapAsync", async () => { + const input = [1, 2, 3]; + + const result = await flatMapAsync(input, async (item, index) => { + await sleep(index); + return [index, item * index]; + }); + + expect(result).toEqual([0, 0, 1, 2, 2, 6]); + }); + + it("mapAsync", async () => { + const input = [1, 2, 3]; + + const result = await mapAsync(input, async (item, index) => { + await sleep(index); + return item * index; + }); + + expect(result).toEqual([0, 2, 6]); + }); +}); diff --git a/.github/shared/test/changed-files.test.js 
b/.github/shared/test/changed-files.test.js new file mode 100644 index 000000000000..ae6feda82191 --- /dev/null +++ b/.github/shared/test/changed-files.test.js @@ -0,0 +1,238 @@ +// @ts-check + +import { afterEach, describe, expect, it, vi } from "vitest"; + +vi.mock("simple-git", () => ({ + simpleGit: vi.fn().mockReturnValue({ + diff: vi.fn().mockResolvedValue(""), + }), +})); + +import * as simpleGit from "simple-git"; +import { + dataPlane, + example, + getChangedFiles, + getChangedFilesStatuses, + json, + readme, + resourceManager, + specification, + swagger, + scenario, +} from "../src/changed-files.js"; +import { debugLogger } from "../src/logger.js"; + +describe("changedFiles", () => { + afterEach(() => { + vi.clearAllMocks(); + }); + + it.each([{}, { logger: debugLogger }])(`getChangedFiles(%o)`, async (options) => { + const files = [ + ".github/src/changed-files.js", + "specification/contosowidgetmanager/Contoso.Management/main.tsp", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", + ]; + + vi.mocked(simpleGit.simpleGit().diff).mockResolvedValue(files.join("\n")); + + await expect(getChangedFiles(options)).resolves.toEqual(files); + }); + + const files = [ + "cspell.json", + "cspell.yaml", + "MixedCase.jSoN", + "README.MD", + "specification/contosowidgetmanager/data-plane/readme.md", + "specification/contosowidgetmanager/Contoso.Management/main.tsp", + "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", + "specification/contosowidgetmanager/resource-manager/readme.md", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", + 
"specification/contosowidgetmanager/Contoso.Management/scenarios/2021-11-01/Employees_Get.json", + ]; + + it("filter:json", () => { + const expected = [ + "cspell.json", + "MixedCase.jSoN", + "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", + "specification/contosowidgetmanager/Contoso.Management/scenarios/2021-11-01/Employees_Get.json", + ]; + + expect(files.filter(json)).toEqual(expected); + }); + + it("filter:readme", () => { + const expected = [ + "README.MD", + "specification/contosowidgetmanager/data-plane/readme.md", + "specification/contosowidgetmanager/resource-manager/readme.md", + ]; + + expect(files.filter(readme)).toEqual(expected); + }); + + it("filter:specification", () => { + const expected = [ + "specification/contosowidgetmanager/data-plane/readme.md", + "specification/contosowidgetmanager/Contoso.Management/main.tsp", + "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", + "specification/contosowidgetmanager/resource-manager/readme.md", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", + "specification/contosowidgetmanager/Contoso.Management/scenarios/2021-11-01/Employees_Get.json", + ]; + + expect(files.filter(specification)).toEqual(expected); + }); + + it("filter:data-plane", () => { + const expected = ["specification/contosowidgetmanager/data-plane/readme.md"]; + + expect(files.filter(dataPlane)).toEqual(expected); + }); + + it("filter:resource-manager", () => { + const expected = [ + "specification/contosowidgetmanager/resource-manager/readme.md", + 
"specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", + ]; + + expect(files.filter(resourceManager)).toEqual(expected); + }); + + it("filter:example", () => { + const expected = [ + "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", + ]; + + expect(files.filter(example)).toEqual(expected); + }); + + it("filter:scenarios", () => { + const expected = [ + "specification/contosowidgetmanager/Contoso.Management/scenarios/2021-11-01/Employees_Get.json", + ]; + + expect(files.filter(scenario)).toEqual(expected); + }); + + it("filter:swagger", () => { + const expected = [ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + ]; + + expect(files.filter(swagger)).toEqual(expected); + }); + + describe("getChangedFilesStatuses", () => { + it("should categorize files correctly with all types of changes", async () => { + const gitOutput = [ + "A\tspecification/new-service/readme.md", + "M\tspecification/existing-service/main.tsp", + "D\tspecification/old-service/contoso.json", + "R100\tspecification/service/old-name.json\tspecification/service/new-name.json", + "C90\tspecification/template/base.json\tspecification/service/derived.json", + "T\tspecification/service/type-changed.json", + ].join("\n"); + + vi.mocked(simpleGit.simpleGit().diff).mockResolvedValue(gitOutput); + const result = await getChangedFilesStatuses(); + expect(result).toEqual({ + additions: ["specification/new-service/readme.md", "specification/service/derived.json"], + modifications: [ + "specification/existing-service/main.tsp", + "specification/service/type-changed.json", + ], + deletions: 
["specification/old-service/contoso.json"], + renames: [ + { + from: "specification/service/old-name.json", + to: "specification/service/new-name.json", + }, + ], + total: 6, + }); + }); + + it("should handle empty git output", async () => { + vi.mocked(simpleGit.simpleGit().diff).mockResolvedValue(""); + const result = await getChangedFilesStatuses(); + expect(result).toEqual({ + additions: [], + modifications: [], + deletions: [], + renames: [], + total: 0, + }); + }); + + it("should handle only additions", async () => { + const gitOutput = [ + "A\tspecification/service1/readme.md", + "A\tspecification/service2/main.tsp", + ].join("\n"); + + vi.mocked(simpleGit.simpleGit().diff).mockResolvedValue(gitOutput); + const result = await getChangedFilesStatuses(); + expect(result).toEqual({ + additions: ["specification/service1/readme.md", "specification/service2/main.tsp"], + modifications: [], + deletions: [], + renames: [], + total: 2, + }); + }); + + it("should handle only renames", async () => { + const gitOutput = [ + "R95\told/path/file1.json\tnew/path/file1.json", + "R100\tservice/old.tsp\tservice/new.tsp", + ].join("\n"); + + vi.mocked(simpleGit.simpleGit().diff).mockResolvedValue(gitOutput); + const result = await getChangedFilesStatuses(); + expect(result).toEqual({ + additions: [], + modifications: [], + deletions: [], + renames: [ + { + from: "old/path/file1.json", + to: "new/path/file1.json", + }, + { + from: "service/old.tsp", + to: "service/new.tsp", + }, + ], + total: 2, + }); + }); + + it("should pass git options correctly", async () => { + const options = { + baseCommitish: "origin/main", + headCommitish: "feature-branch", + cwd: "/custom/path", + }; + + vi.mocked(simpleGit.simpleGit().diff).mockResolvedValue("A\ttest.json"); + await getChangedFilesStatuses(options); + expect(simpleGit.simpleGit).toHaveBeenCalledWith("/custom/path"); + expect(simpleGit.simpleGit().diff).toHaveBeenCalledWith([ + "--name-status", + "origin/main", + "feature-branch", + 
]); + }); + }); +}); diff --git a/.github/test/equality.test.js b/.github/shared/test/equality.test.js similarity index 100% rename from .github/test/equality.test.js rename to .github/shared/test/equality.test.js diff --git a/.github/shared/test/error-reporting.test.js b/.github/shared/test/error-reporting.test.js new file mode 100644 index 000000000000..107e2141de71 --- /dev/null +++ b/.github/shared/test/error-reporting.test.js @@ -0,0 +1,49 @@ +// @ts-check + +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { setSummary, annotateFileError } from "../src/error-reporting.js"; +import fs from "fs/promises"; + +describe("ErrorReporting", () => { + let logSpy; + + beforeEach(() => { + logSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + // ensure that on test runs GITHUB_STEP_SUMMARY is not set in my current env by default + // this gives us a clean slate for each test + delete process.env.GITHUB_STEP_SUMMARY; + }); + + afterEach(() => { + logSpy.mockRestore(); + }); + + it("should warn when GITHUB_STEP_SUMMARY is unset", () => { + setSummary("hello"); + expect(logSpy).toHaveBeenCalledWith("GITHUB_STEP_SUMMARY is not set. Skipping summary update."); + }); + + it("should write to the summary file when GITHUB_STEP_SUMMARY is set", async () => { + process.env.GITHUB_STEP_SUMMARY = `${__dirname}/tmp-summary.md`; + + await fs.rm(process.env.GITHUB_STEP_SUMMARY, { force: true }); + + setSummary("# Title"); + + expect(logSpy).not.toHaveBeenCalledWith( + "GITHUB_STEP_SUMMARY is not set. 
Skipping summary update.", + ); + + const content = await fs.readFile(process.env.GITHUB_STEP_SUMMARY, "utf-8"); + + // cleanup after the test so nothing is left behind + await fs.rm(process.env.GITHUB_STEP_SUMMARY, { force: true }); + + expect(content).toBe("# Title"); + }); + + it("should emit a GitHub-style error annotation", () => { + annotateFileError("src/foo.js", "Something broke", 42, 7); + expect(logSpy).toHaveBeenCalledWith("::error file=src/foo.js,line=42,col=7::Something broke"); + }); +}); diff --git a/.github/shared/test/examples.js b/.github/shared/test/examples.js new file mode 100644 index 000000000000..12336f56be2f --- /dev/null +++ b/.github/shared/test/examples.js @@ -0,0 +1,112 @@ +// @ts-check + +export const swaggerHandWritten = JSON.stringify("foo"); + +export const swaggerTypeSpecGenerated = JSON.stringify({ + info: { + "x-typespec-generated": [{ emitter: "@azure-tools/typespec-autorest" }], + }, +}); + +export const contosoTspConfig = ` +parameters: + "service-dir": + default: "sdk/contosowidgetmanager" + "dependencies": + default: "" +emit: + - "@azure-tools/typespec-autorest" +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/data-plane" +options: + "@azure-tools/typespec-autorest": + azure-resource-provider-folder: "data-plane" + emit-lro-options: "none" + emitter-output-dir: "{project-root}/.." 
+ output-file: "{azure-resource-provider-folder}/{service-name}/{version-status}/{version}/widgets.json" + "@azure-tools/typespec-python": + package-dir: "azure-contoso-widgetmanager" + namespace: "azure.contoso.widgetmanager" + generate-test: true + generate-sample: true + flavor: azure + "@azure-tools/typespec-csharp": + package-dir: "Azure.Template.Contoso" + clear-output-folder: true + model-namespace: false + namespace: "{package-dir}" + flavor: azure + "@azure-tools/typespec-ts": + package-dir: "contosowidgetmanager-rest" + package-details: + name: "@azure-rest/contoso-widgetmanager-rest" + flavor: azure + "@azure-tools/typespec-java": + package-dir: "azure-contoso-widgetmanager" + namespace: com.azure.contoso.widgetmanager + flavor: azure + "@azure-tools/typespec-go": + module: "github.com/Azure/azure-sdk-for-go/{service-dir}/{package-dir}" + service-dir: "sdk/contosowidget" + package-dir: "azmanager" + module-version: "0.0.1" + generate-fakes: true + inject-spans: true + single-client: true + slice-elements-byval: true + "@azure-tools/typespec-client-generator-cli": + additionalDirectories: + - "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/" +`; + +export const contosoReadme = ` +# contosowidgetmanager + +> see https://aka.ms/autorest +This is the AutoRest configuration file for Contoso. + +## Getting Started + +To build the SDKs for My API, simply install AutoRest via \`npm\` (\`npm install -g autorest\`) and then run: + +> \`autorest readme.md\` +To see additional help and options, run: + +> \`autorest --help\` +For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. + +--- + +## Configuration + +### Basic Information + +These are the global settings for the containerstorage. 
+ +\`\`\`yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +\`\`\` + +### Tag: package-2021-11-01 + +These settings apply only when \`--tag=package-2021-11-01\` is specified on the command line. + +\`\`\`yaml $(tag) == 'package-2021-11-01' +input-file: + - Microsoft.Contoso/stable/2021-11-01/contoso.json +\`\`\` + +### Tag: package-2021-10-01-preview + +These settings apply only when \`--tag=package-2021-10-01-preview\` is specified on the command line. + +\`\`\`yaml $(tag) == 'package-2021-10-01-preview' +input-file: + - Microsoft.Contoso/preview/2021-10-01-preview/contoso.json +\`\`\` + +--- +`; diff --git a/.github/shared/test/exec.test.js b/.github/shared/test/exec.test.js new file mode 100644 index 000000000000..818a40a84924 --- /dev/null +++ b/.github/shared/test/exec.test.js @@ -0,0 +1,87 @@ +import semver from "semver"; +import { describe, expect, it } from "vitest"; +import { execFile, execNpm, execNpmExec, isExecError } from "../src/exec.js"; +import { debugLogger } from "../src/logger.js"; + +const options = { logger: debugLogger }; + +describe("execFile", () => { + const file = "node"; + const args = ["-e", `console.log("test")`]; + const expected = "test\n"; + + it.each([{}, options])("exec succeeds with default buffer (options: %o)", async (options) => { + await expect(execFile(file, args, options)).resolves.toEqual({ + stdout: expected, + stderr: "", + }); + }); + + it("exec succeeds with exact-sized buffer", async () => { + await expect(execFile(file, args, { ...options, maxBuffer: expected.length })).resolves.toEqual( + { stdout: expected, stderr: "" }, + ); + }); + + it("exec fails with too-small buffer", async () => { + await expect( + execFile(file, args, { ...options, maxBuffer: expected.length - 1 }), + ).rejects.toThrowError( + expect.objectContaining({ + stdout: "test", + stderr: "", + code: "ERR_CHILD_PROCESS_STDIO_MAXBUFFER", + }), + ); + }); +}); + +describe("execNpm", () => { + it("succeeds with --version", async 
() => { + await expect(execNpm(["--version"], options)).resolves.toEqual({ + stdout: expect.toSatisfy((v) => semver.valid(v)), + stderr: "", + }); + }); + + it("fails with --help", async () => { + await expect(execNpm(["--help"], options)).rejects.toThrowError( + expect.objectContaining({ + stdout: expect.stringMatching(/usage/i), + stderr: "", + code: 1, + }), + ); + }); +}); + +describe("execNpmExec", () => { + // A command run in the context of "npm exec --no -- ___" needs to call + // something referenced in package.json. In this case, js-yaml is present + // so it is used. + it("runs js-yaml", async () => { + await expect(execNpmExec(["js-yaml", "--version"], options)).resolves.toEqual({ + stdout: expect.toSatisfy((v) => semver.valid(v)), + stderr: "", + error: undefined, + }); + }); +}); + +describe("isExecError", () => { + it("isExecError", () => { + expect(isExecError("test")).toBe(false); + + const error = new Error(); + expect(isExecError(error)).toBe(false); + + error.stdout = "test"; + expect(isExecError(error)).toBe(true); + + delete error.stdout; + expect(isExecError(error)).toBe(false); + + error.stderr = "test"; + expect(isExecError(error)).toBe(true); + }); +}); diff --git a/.github/shared/test/fixtures/Swagger/ignoreExamples/examples/example.json b/.github/shared/test/fixtures/Swagger/ignoreExamples/examples/example.json new file mode 100644 index 000000000000..05a1d7a8679e --- /dev/null +++ b/.github/shared/test/fixtures/Swagger/ignoreExamples/examples/example.json @@ -0,0 +1,6 @@ +{ + "value": { + "id": "123", + "name": "Sample Item" + } +} diff --git a/.github/shared/test/fixtures/Swagger/ignoreExamples/included.json b/.github/shared/test/fixtures/Swagger/ignoreExamples/included.json new file mode 100644 index 000000000000..8c15c9f184f7 --- /dev/null +++ b/.github/shared/test/fixtures/Swagger/ignoreExamples/included.json @@ -0,0 +1,22 @@ +{ + "swagger": "2.0", + "info": { + "title": "Item Model", + "version": "1.0.0" + }, + "paths": {}, + 
"definitions": { + "Item": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": ["id", "name"] + } + } +} diff --git a/.github/shared/test/fixtures/Swagger/ignoreExamples/swagger.json b/.github/shared/test/fixtures/Swagger/ignoreExamples/swagger.json new file mode 100644 index 000000000000..392038166343 --- /dev/null +++ b/.github/shared/test/fixtures/Swagger/ignoreExamples/swagger.json @@ -0,0 +1,27 @@ +{ + "swagger": "2.0", + "info": { + "title": "Test Fixture API", + "version": "1.0.0" + }, + "paths": { + "/item": { + "get": { + "summary": "Get an item", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "included.json#/definitions/Item" + }, + "examples": { + "application/json": { + "$ref": "examples/example.json" + } + } + } + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/common-types/resource-management/v5/types.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/common-types/resource-management/v5/types.json new file mode 100644 index 000000000000..600190811bf8 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/common-types/resource-management/v5/types.json @@ -0,0 +1,722 @@ +{ + "swagger": "2.0", + "info": { + "title": "Common types", + "version": "5.0" + }, + "paths": {}, + "definitions": { + "AzureEntityResource": { + "type": "object", + "title": "Entity Resource", + "description": "The resource model definition for an Azure Resource Manager resource with an etag.", + "properties": { + "etag": { + "type": "string", + "description": "Resource Etag.", + "readOnly": true + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "x-ms-client-name": "AzureEntityResource" + }, + "CheckNameAvailabilityRequest": { + "type": "object", + "description": "The check availability request body.", + "properties": { + "name": { + "type": 
"string", + "description": "The name of the resource for which availability needs to be checked." + }, + "type": { + "type": "string", + "description": "The resource type." + } + } + }, + "CheckNameAvailabilityResponse": { + "type": "object", + "description": "The check availability result.", + "properties": { + "nameAvailable": { + "type": "boolean", + "description": "Indicates if the resource name is available." + }, + "reason": { + "type": "string", + "description": "The reason why the given name is not available.", + "enum": [ + "Invalid", + "AlreadyExists" + ], + "x-ms-enum": { + "name": "CheckNameAvailabilityReason", + "modelAsString": true + } + }, + "message": { + "type": "string", + "description": "Detailed reason why the given name is available." + } + } + }, + "ErrorAdditionalInfo": { + "type": "object", + "description": "The resource management error additional info.", + "properties": { + "type": { + "type": "string", + "description": "The additional info type.", + "readOnly": true + }, + "info": { + "type": "object", + "description": "The additional info.", + "readOnly": true + } + } + }, + "ErrorDetail": { + "type": "object", + "description": "The error detail.", + "properties": { + "code": { + "type": "string", + "description": "The error code.", + "readOnly": true + }, + "message": { + "type": "string", + "description": "The error message.", + "readOnly": true + }, + "target": { + "type": "string", + "description": "The error target.", + "readOnly": true + }, + "details": { + "type": "array", + "description": "The error details.", + "items": { + "$ref": "#/definitions/ErrorDetail" + }, + "readOnly": true, + "x-ms-identifiers": [ + "message", + "target" + ] + }, + "additionalInfo": { + "type": "array", + "description": "The error additional info.", + "items": { + "$ref": "#/definitions/ErrorAdditionalInfo" + }, + "readOnly": true, + "x-ms-identifiers": [] + } + } + }, + "ErrorResponse": { + "type": "object", + "title": "Error response", + 
"description": "Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.).", + "properties": { + "error": { + "$ref": "#/definitions/ErrorDetail", + "description": "The error object." + } + } + }, + "Identity": { + "type": "object", + "description": "Identity for the resource.", + "properties": { + "principalId": { + "type": "string", + "format": "uuid", + "description": "The principal ID of resource identity. The value must be an UUID.", + "readOnly": true + }, + "tenantId": { + "type": "string", + "format": "uuid", + "description": "The tenant ID of resource. The value must be an UUID.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The identity type.", + "enum": [ + "SystemAssigned" + ], + "x-ms-enum": { + "name": "ResourceIdentityType", + "modelAsString": false + } + } + } + }, + "KeyVaultProperties": { + "type": "object", + "properties": { + "keyIdentifier": { + "type": "string", + "description": "Key vault uri to access the encryption key." + }, + "identity": { + "type": "string", + "description": "The client ID of the identity which will be used to access key vault." + } + } + }, + "Operation": { + "type": "object", + "title": "REST API Operation", + "description": "Details of a REST API operation, returned from the Resource Provider Operations API", + "properties": { + "name": { + "type": "string", + "description": "The name of the operation, as per Resource-Based Access Control (RBAC). Examples: \"Microsoft.Compute/virtualMachines/write\", \"Microsoft.Compute/virtualMachines/capture/action\"", + "readOnly": true + }, + "isDataAction": { + "type": "boolean", + "description": "Whether the operation applies to data-plane. 
This is \"true\" for data-plane operations and \"false\" for ARM/control-plane operations.", + "readOnly": true + }, + "display": { + "type": "object", + "description": "Localized display information for this particular operation.", + "properties": { + "provider": { + "type": "string", + "description": "The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring Insights\" or \"Microsoft Compute\".", + "readOnly": true + }, + "resource": { + "type": "string", + "description": "The localized friendly name of the resource type related to this operation. E.g. \"Virtual Machines\" or \"Job Schedule Collections\".", + "readOnly": true + }, + "operation": { + "type": "string", + "description": "The concise, localized friendly name for the operation; suitable for dropdowns. E.g. \"Create or Update Virtual Machine\", \"Restart Virtual Machine\".", + "readOnly": true + }, + "description": { + "type": "string", + "description": "The short, localized friendly description of the operation; suitable for tool tips and detailed views.", + "readOnly": true + } + } + }, + "origin": { + "type": "string", + "description": "The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit logs UX. Default value is \"user,system\"", + "enum": [ + "user", + "system", + "user,system" + ], + "x-ms-enum": { + "name": "Origin", + "modelAsString": true + }, + "readOnly": true + }, + "actionType": { + "type": "string", + "description": "Enum. Indicates the action type. \"Internal\" refers to actions that are for internal only APIs.", + "enum": [ + "Internal" + ], + "x-ms-enum": { + "name": "ActionType", + "modelAsString": true + }, + "readOnly": true + } + } + }, + "OperationListResult": { + "type": "object", + "description": "A list of REST API operations supported by an Azure Resource Provider. 
It contains an URL link to get the next set of results.", + "properties": { + "value": { + "type": "array", + "description": "List of operations supported by the resource provider", + "items": { + "$ref": "#/definitions/Operation" + }, + "readOnly": true, + "x-ms-identifiers": [ + "name" + ] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "URL to get the next set of operation list results (if there are any).", + "readOnly": true + } + } + }, + "OperationStatusResult": { + "type": "object", + "description": "The current status of an async operation.", + "properties": { + "id": { + "type": "string", + "format": "arm-id", + "description": "Fully qualified ID for the async operation." + }, + "resourceId": { + "type": "string", + "format": "arm-id", + "description": "Fully qualified ID of the resource against which the original async operation was started.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "Name of the async operation." + }, + "status": { + "type": "string", + "description": "Operation status." + }, + "percentComplete": { + "type": "number", + "description": "Percent of the operation that is complete.", + "minimum": 0, + "maximum": 100 + }, + "startTime": { + "type": "string", + "format": "date-time", + "description": "The start time of the operation." + }, + "endTime": { + "type": "string", + "format": "date-time", + "description": "The end time of the operation." + }, + "operations": { + "type": "array", + "description": "The operations list.", + "items": { + "$ref": "#/definitions/OperationStatusResult" + } + }, + "error": { + "$ref": "#/definitions/ErrorDetail", + "description": "If present, details of the operation error." + } + }, + "required": [ + "status" + ] + }, + "Plan": { + "type": "object", + "description": "Plan for the resource.", + "properties": { + "name": { + "type": "string", + "description": "A user defined name of the 3rd Party Artifact that is being procured." 
+ }, + "publisher": { + "type": "string", + "description": "The publisher of the 3rd Party Artifact that is being bought. E.g. NewRelic" + }, + "product": { + "type": "string", + "description": "The 3rd Party artifact that is being procured. E.g. NewRelic. Product maps to the OfferID specified for the artifact at the time of Data Market onboarding. " + }, + "promotionCode": { + "type": "string", + "description": "A publisher provided promotion code as provisioned in Data Market for the said product/artifact." + }, + "version": { + "type": "string", + "description": "The version of the desired product/artifact." + } + }, + "required": [ + "name", + "publisher", + "product" + ] + }, + "ProxyResource": { + "type": "object", + "title": "Proxy Resource", + "description": "The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location", + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "Resource": { + "type": "object", + "title": "Resource", + "description": "Common fields that are returned in the response for all Azure Resource Manager resources", + "properties": { + "id": { + "type": "string", + "format": "arm-id", + "description": "Fully qualified resource ID for the resource. E.g. \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\"", + "readOnly": true + }, + "name": { + "type": "string", + "description": "The name of the resource", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The type of the resource. E.g. 
\"Microsoft.Compute/virtualMachines\" or \"Microsoft.Storage/storageAccounts\"", + "readOnly": true + }, + "systemData": { + "$ref": "#/definitions/systemData", + "description": "Azure Resource Manager metadata containing createdBy and modifiedBy information.", + "readOnly": true + } + }, + "x-ms-azure-resource": true + }, + "ResourceModelWithAllowedPropertySet": { + "type": "object", + "description": "The resource model definition containing the full set of allowed properties for a resource. Except properties bag, there cannot be a top level property outside of this set.", + "properties": { + "managedBy": { + "type": "string", + "description": "The fully qualified resource ID of the resource that manages this resource. Indicates if this resource is managed by another Azure resource. If this is present, complete mode deployment will not delete the resource if it is removed from the template since it is managed by another resource.", + "x-ms-mutability": [ + "read", + "create", + "update" + ] + }, + "kind": { + "type": "string", + "description": "Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. E.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the resource provider must validate and persist this value.", + "pattern": "^[-\\w\\._,\\(\\)]+$", + "x-ms-mutability": [ + "read", + "create" + ] + }, + "etag": { + "type": "string", + "description": "The etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal etag convention. Entity tags are used for comparing two or more entities from the same requested resource. HTTP/1.1 uses entity tags in the etag (section 14.19), If-Match (section 14.24), If-None-Match (section 14.26), and If-Range (section 14.27) header fields. 
", + "readOnly": true + }, + "identity": { + "allOf": [ + { + "$ref": "#/definitions/Identity" + } + ] + }, + "sku": { + "allOf": [ + { + "$ref": "#/definitions/Sku" + } + ] + }, + "plan": { + "allOf": [ + { + "$ref": "#/definitions/Plan" + } + ] + } + }, + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "x-ms-azure-resource": true + }, + "Sku": { + "type": "object", + "description": "The resource model definition representing SKU", + "properties": { + "name": { + "type": "string", + "description": "The name of the SKU. E.g. P3. It is typically a letter+number code" + }, + "tier": { + "$ref": "#/definitions/SkuTier" + }, + "size": { + "type": "string", + "description": "The SKU size. When the name field is the combination of tier and some other value, this would be the standalone code. " + }, + "family": { + "type": "string", + "description": "If the service has different generations of hardware, for the same SKU, then that can be captured here." + }, + "capacity": { + "type": "integer", + "format": "int32", + "description": "If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted." 
+ } + }, + "required": [ + "name" + ] + }, + "SkuTier": { + "type": "string", + "description": "This field is required to be implemented by the Resource Provider if the service has more than one tier, but is not required on a PUT.", + "enum": [ + "Free", + "Basic", + "Standard", + "Premium" + ], + "x-ms-enum": { + "name": "SkuTier", + "modelAsString": false + } + }, + "TrackedResource": { + "type": "object", + "title": "Tracked Resource", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ] + }, + "location": { + "type": "string", + "description": "The geo-location where the resource lives", + "x-ms-mutability": [ + "read", + "create" + ] + } + }, + "required": [ + "location" + ], + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "encryptionProperties": { + "type": "object", + "description": "Configuration of key for data encryption", + "properties": { + "status": { + "type": "string", + "description": "Indicates whether or not the encryption is enabled for container registry.", + "enum": [ + "enabled", + "disabled" + ], + "x-ms-enum": { + "name": "EncryptionStatus", + "modelAsString": true + } + }, + "keyVaultProperties": { + "$ref": "#/definitions/KeyVaultProperties", + "description": "Key vault properties." + } + } + }, + "locationData": { + "type": "object", + "description": "Metadata pertaining to the geographic location of the resource.", + "properties": { + "name": { + "type": "string", + "description": "A canonical name for the geographic or physical location.", + "maxLength": 256 + }, + "city": { + "type": "string", + "description": "The city or locality where the resource is located." 
+ }, + "district": { + "type": "string", + "description": "The district, state, or province where the resource is located." + }, + "countryOrRegion": { + "type": "string", + "description": "The country or region where the resource is located" + } + }, + "required": [ + "name" + ] + }, + "systemData": { + "type": "object", + "description": "Metadata pertaining to creation and last modification of the resource.", + "properties": { + "createdBy": { + "type": "string", + "description": "The identity that created the resource." + }, + "createdByType": { + "type": "string", + "description": "The type of identity that created the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The timestamp of resource creation (UTC)." + }, + "lastModifiedBy": { + "type": "string", + "description": "The identity that last modified the resource." 
+ }, + "lastModifiedByType": { + "type": "string", + "description": "The type of identity that last modified the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "lastModifiedAt": { + "type": "string", + "format": "date-time", + "description": "The timestamp of resource last modification (UTC)" + } + }, + "readOnly": true + } + }, + "parameters": { + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1 + }, + "If-Match": { + "name": "ifMatch", + "in": "header", + "description": "The If-Match header that makes a request conditional.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + }, + "If-None-Match": { + "name": "ifNoneMatch", + "in": "header", + "description": "The If-None-Match header that makes a request conditional.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + }, + "LocationParameter": { + "name": "location", + "in": "path", + "description": "The name of the Azure region.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method" + }, + "ManagementGroupNameParameter": { + "name": "managementGroupName", + "in": "path", + "description": "The name of the management group. The name is case insensitive.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "OperationIdParameter": { + "name": "operationId", + "in": "path", + "description": "The ID of an ongoing async operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method" + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "description": "The name of the resource group. 
The name is case insensitive.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "ScopeParameter": { + "name": "scope", + "in": "path", + "description": "The scope at which the operation is performed.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-skip-url-encoding": true + }, + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "description": "The ID of the target subscription. The value must be an UUID.", + "required": true, + "type": "string", + "format": "uuid" + }, + "TenantIdParameter": { + "name": "tenantId", + "in": "path", + "description": "The Azure tenant ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000)", + "required": true, + "type": "string", + "format": "uuid", + "x-ms-parameter-location": "method" + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json new file mode 100644 index 000000000000..c51e0e74c52f --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json @@ -0,0 +1,556 @@ +{ + "swagger": "2.0", + "info": { + "title": "Microsoft.Contoso management service", + "version": "2021-10-01-preview", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + 
"user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + }, + { + "name": "Employees" + } + ], + "paths": { + "/providers/Microsoft.Contoso/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + "Operations" + ], + "description": "List the operations for the provider", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Operations_List": { + "$ref": "./examples/Operations_List.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + 
"description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListBySubscription": { + "$ref": "./examples/Employees_ListBySubscription.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + "Employees" + ], + "description": "List Employee resources by resource group", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListByResourceGroup": { + "$ref": "./examples/Employees_ListByResourceGroup.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": 
"../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": "./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + 
"Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_CreateOrUpdate": { + "$ref": "./examples/Employees_CreateOrUpdate.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Update": { + "$ref": 
"./examples/Employees_Update.json" + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_CreateOrUpdate.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..9b34209dcd17 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + 
"id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json new file mode 100644 index 000000000000..9ac7910eb3f7 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-10-01-preview", + 
"subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json new file mode 100644 index 000000000000..3ee7ff5b9c4f --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git 
a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListByResourceGroup.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..f6512d2ba7c2 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListBySubscription.json 
b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListBySubscription.json new file mode 100644 index 000000000000..ffa095dd1d66 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Update.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Update.json new file mode 100644 index 000000000000..f3f85a465653 --- /dev/null +++ 
b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Update.json @@ -0,0 +1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Operations_List.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Operations_List.json new file mode 100644 index 000000000000..6185e205aa43 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": 
"Operations_List", + "parameters": { + "api-version": "2021-10-01-preview" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + "description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json new file mode 100644 index 000000000000..10ffa6631f16 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json @@ -0,0 +1,556 @@ +{ + "swagger": "2.0", + "info": { + "title": "Microsoft.Contoso management service", + "version": "2021-11-01", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + }, + { + "name": "Employees" + } + ], + "paths": { + "/providers/Microsoft.Contoso/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + 
"Operations" + ], + "description": "List the operations for the provider", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Operations_List": { + "$ref": "./examples/Operations_List.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListBySubscription": { + "$ref": "./examples/Employees_ListBySubscription.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + 
"Employees" + ], + "description": "List Employee resources by resource group", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListByResourceGroup": { + "$ref": "./examples/Employees_ListByResourceGroup.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + 
"schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": "./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + "Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_CreateOrUpdate": { + "$ref": "./examples/Employees_CreateOrUpdate.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Update": { + "$ref": "./examples/Employees_Update.json" + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": 
"../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_CreateOrUpdate.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..4a13a329e3b8 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": 
"/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Delete.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Delete.json new file mode 100644 index 000000000000..15176d86b029 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": 
"11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json new file mode 100644 index 000000000000..eb1917859e24 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git 
a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListByResourceGroup.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..860fab85a9b8 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListBySubscription.json 
b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListBySubscription.json new file mode 100644 index 000000000000..18432d58de37 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Update.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Update.json new file mode 100644 index 000000000000..de46fc8ef2e8 --- /dev/null +++ 
b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Update.json @@ -0,0 +1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json new file mode 100644 index 000000000000..4d74e755c020 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": "Operations_List", + "parameters": { + "api-version": 
"2021-11-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + "description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/readme.md b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/readme.md new file mode 100644 index 000000000000..ae912e6ab9b0 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/contosowidgetmanager/resource-manager/readme.md @@ -0,0 +1,48 @@ +# containerstorage + +> see https://aka.ms/autorest +This is the AutoRest configuration file for Contoso. + +## Getting Started + +To build the SDKs for My API, simply install AutoRest via `npm` (`npm install -g autorest`) and then run: + +> `autorest readme.md` +To see additional help and options, run: + +> `autorest --help` +For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. + +--- + +## Configuration + +### Basic Information + +These are the global settings for the containerstorage. + +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +``` + +### Tag: package-2021-11-01 + +These settings apply only when `--tag=package-2021-11-01` is specified on the command line. + +```yaml $(tag) == 'package-2021-11-01' +input-file: + - Microsoft.Contoso/stable/2021-11-01/contoso.json +``` + +### Tag: package-2021-10-01-preview + +These settings apply only when `--tag=package-2021-10-01-preview` is specified on the command line. 
+ +```yaml $(tag) == 'package-2021-10-01-preview' +input-file: + - Microsoft.Contoso/preview/2021-10-01-preview/contoso.json +``` + +--- diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-invalid-json/invalid-json.json b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-invalid-json/invalid-json.json new file mode 100644 index 000000000000..6b7f8497d846 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-invalid-json/invalid-json.json @@ -0,0 +1 @@ +invalid json \ No newline at end of file diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-invalid-json/readme.md b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-invalid-json/readme.md new file mode 100644 index 000000000000..0a719f6d8418 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-invalid-json/readme.md @@ -0,0 +1,19 @@ +# resolve-error + +Test how resolve errors are handled when a tag points to an input-file that does +not contian valid JSON. + +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +``` + +### Tag: package-2021-11-01 + +These settings apply only when `--tag=package-2021-11-01` is specified on the command line. + +```yaml $(tag) == 'package-2021-11-01' +input-file: + - invalid-json.json +``` diff --git a/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-not-found/readme.md b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-not-found/readme.md new file mode 100644 index 000000000000..e95ed9add13f --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedReadmeTags/specification/input-file-not-found/readme.md @@ -0,0 +1,19 @@ +# resolve-error + +Test how resolve errors are handled when a tag points to an input-file that +does not exist. 
+ +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +``` + +### Tag: package-2021-11-01 + +These settings apply only when `--tag=package-2021-11-01` is specified on the command line. + +```yaml $(tag) == 'package-2021-11-01' +input-file: + - does-not-exist.json +``` diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/a.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/a.json new file mode 100644 index 000000000000..0d9a7af77520 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/a.json @@ -0,0 +1,41 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service A", + "version": "1.0.0" + }, + "paths": { + "/a": { + "get": { + "summary": "Get A (refs B)", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/B" + } + } + } + }, + "put": { + "summary": "Update A (refs C)", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/C" + } + } + } + } + } + }, + "definitions": { + "B": { + "$ref": "./nesting/b.json#/definitions/B" + }, + "C": { + "$ref": "c.json#/definitions/C" + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/c.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/c.json new file mode 100644 index 000000000000..8bec3cc3498e --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/c.json @@ -0,0 +1,51 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service C", + "version": "1.0.0" + }, + "paths": { + "/c": { + "get": { + "summary": "Get C", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/C" + } + } + } + } + }, + "/d": { + "get": { + "summary": "Get D", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": 
"#/definitions/D" + } + } + } + } + } + }, + "definitions": { + "C": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + }, + "D": { + "$ref": "d.json#/definitions/D" + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/d.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/d.json new file mode 100644 index 000000000000..ad2cee57453b --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/d.json @@ -0,0 +1,51 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service D", + "version": "1.0.0" + }, + "paths": { + "/d": { + "get": { + "summary": "Get D", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/D" + } + } + } + } + }, + "/shared-thing": { + "get": { + "summary": "Get SharedThing", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/SharedThing" + } + } + } + } + } + }, + "definitions": { + "D": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "description": { + "type": "string" + } + } + }, + "SharedThing": { + "$ref": "./shared/shared.json#/definitions/SharedThing" + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/e.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/e.json new file mode 100644 index 000000000000..ff2304866ec0 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/e.json @@ -0,0 +1,38 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service E", + "version": "1.0.0" + }, + "paths": { + "/e": { + "get": { + "summary": "Get E", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/E" + } + } + } + } + } + }, + "definitions": { + "E": { + "type": "object", + 
"properties": { + "id": { + "type": "integer" + }, + "description": { + "type": "string" + } + } + }, + "SharedThing": { + "$ref": "./shared/shared.json#/definitions/SharedThing" + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/nesting/b.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/nesting/b.json new file mode 100644 index 000000000000..d71cadf5d9d4 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/nesting/b.json @@ -0,0 +1,49 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service B", + "version": "1.0.0" + }, + "paths": { + "/b": { + "get": { + "summary": "Get B (refs C)", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/C" + } + } + } + }, + "put": { + "summary": "Update B (refs D)", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/D" + } + } + } + } + } + }, + "definitions": { + "B": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "description": { + "type": "string" + } + } + }, + "C": { + "$ref": "../c.json#/definitions/C" + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/readme.md b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/readme.md new file mode 100644 index 000000000000..68bb802fe44f --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/readme.md @@ -0,0 +1,40 @@ +# Contoso.WidgetManager + +## Configuration + +Testing buildState + +### Basic Information + +```yaml +openapi-type: data-plane +tag: tag-1 +``` + +### Tag: tag-1 + +These settings apply only when `--tag=tag-1` is specified on the command line. 
+ +```yaml $(tag) == 'tag-1' +input-file: + - a.json +``` + +### Tag: tag-2 + +These settings apply only when `--tag=tag-2` is specified on the command line. + +```yaml $(tag) == 'tag-2' +input-file: + - e.json +``` + +### Tag: tag-3 + +These settings apply only when `--tag=tag-3` is specified on the command line. + +This tag has no yaml entities defined. + +```yaml $(tag) == 'tag-3' + +``` diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/shared/shared.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/shared/shared.json new file mode 100644 index 000000000000..4772308443c2 --- /dev/null +++ b/.github/shared/test/fixtures/getAffectedSwaggers/specification/1/data-plane/shared/shared.json @@ -0,0 +1,20 @@ +{ + "swagger": "2.0", + "info": { + "title": "Shared", + "version": "1.0.0" + }, + "definitions": { + "SharedThing": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "description": { + "type": "string" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/2/e.json b/.github/shared/test/fixtures/getAffectedSwaggers/specification/2/e.json new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/shared/test/fixtures/getAffectedSwaggers/specification/empty/keep.txt b/.github/shared/test/fixtures/getAffectedSwaggers/specification/empty/keep.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/shared/test/fixtures/getSpecModel/specification/common-types/resource-management/v5/types.json b/.github/shared/test/fixtures/getSpecModel/specification/common-types/resource-management/v5/types.json new file mode 100644 index 000000000000..600190811bf8 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/common-types/resource-management/v5/types.json @@ -0,0 +1,722 @@ +{ + "swagger": "2.0", + "info": { + "title": "Common types", + "version": "5.0" + }, + "paths": {}, + 
"definitions": { + "AzureEntityResource": { + "type": "object", + "title": "Entity Resource", + "description": "The resource model definition for an Azure Resource Manager resource with an etag.", + "properties": { + "etag": { + "type": "string", + "description": "Resource Etag.", + "readOnly": true + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "x-ms-client-name": "AzureEntityResource" + }, + "CheckNameAvailabilityRequest": { + "type": "object", + "description": "The check availability request body.", + "properties": { + "name": { + "type": "string", + "description": "The name of the resource for which availability needs to be checked." + }, + "type": { + "type": "string", + "description": "The resource type." + } + } + }, + "CheckNameAvailabilityResponse": { + "type": "object", + "description": "The check availability result.", + "properties": { + "nameAvailable": { + "type": "boolean", + "description": "Indicates if the resource name is available." + }, + "reason": { + "type": "string", + "description": "The reason why the given name is not available.", + "enum": [ + "Invalid", + "AlreadyExists" + ], + "x-ms-enum": { + "name": "CheckNameAvailabilityReason", + "modelAsString": true + } + }, + "message": { + "type": "string", + "description": "Detailed reason why the given name is available." 
+ } + } + }, + "ErrorAdditionalInfo": { + "type": "object", + "description": "The resource management error additional info.", + "properties": { + "type": { + "type": "string", + "description": "The additional info type.", + "readOnly": true + }, + "info": { + "type": "object", + "description": "The additional info.", + "readOnly": true + } + } + }, + "ErrorDetail": { + "type": "object", + "description": "The error detail.", + "properties": { + "code": { + "type": "string", + "description": "The error code.", + "readOnly": true + }, + "message": { + "type": "string", + "description": "The error message.", + "readOnly": true + }, + "target": { + "type": "string", + "description": "The error target.", + "readOnly": true + }, + "details": { + "type": "array", + "description": "The error details.", + "items": { + "$ref": "#/definitions/ErrorDetail" + }, + "readOnly": true, + "x-ms-identifiers": [ + "message", + "target" + ] + }, + "additionalInfo": { + "type": "array", + "description": "The error additional info.", + "items": { + "$ref": "#/definitions/ErrorAdditionalInfo" + }, + "readOnly": true, + "x-ms-identifiers": [] + } + } + }, + "ErrorResponse": { + "type": "object", + "title": "Error response", + "description": "Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.).", + "properties": { + "error": { + "$ref": "#/definitions/ErrorDetail", + "description": "The error object." + } + } + }, + "Identity": { + "type": "object", + "description": "Identity for the resource.", + "properties": { + "principalId": { + "type": "string", + "format": "uuid", + "description": "The principal ID of resource identity. The value must be an UUID.", + "readOnly": true + }, + "tenantId": { + "type": "string", + "format": "uuid", + "description": "The tenant ID of resource. 
The value must be an UUID.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The identity type.", + "enum": [ + "SystemAssigned" + ], + "x-ms-enum": { + "name": "ResourceIdentityType", + "modelAsString": false + } + } + } + }, + "KeyVaultProperties": { + "type": "object", + "properties": { + "keyIdentifier": { + "type": "string", + "description": "Key vault uri to access the encryption key." + }, + "identity": { + "type": "string", + "description": "The client ID of the identity which will be used to access key vault." + } + } + }, + "Operation": { + "type": "object", + "title": "REST API Operation", + "description": "Details of a REST API operation, returned from the Resource Provider Operations API", + "properties": { + "name": { + "type": "string", + "description": "The name of the operation, as per Resource-Based Access Control (RBAC). Examples: \"Microsoft.Compute/virtualMachines/write\", \"Microsoft.Compute/virtualMachines/capture/action\"", + "readOnly": true + }, + "isDataAction": { + "type": "boolean", + "description": "Whether the operation applies to data-plane. This is \"true\" for data-plane operations and \"false\" for ARM/control-plane operations.", + "readOnly": true + }, + "display": { + "type": "object", + "description": "Localized display information for this particular operation.", + "properties": { + "provider": { + "type": "string", + "description": "The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring Insights\" or \"Microsoft Compute\".", + "readOnly": true + }, + "resource": { + "type": "string", + "description": "The localized friendly name of the resource type related to this operation. E.g. \"Virtual Machines\" or \"Job Schedule Collections\".", + "readOnly": true + }, + "operation": { + "type": "string", + "description": "The concise, localized friendly name for the operation; suitable for dropdowns. E.g. 
\"Create or Update Virtual Machine\", \"Restart Virtual Machine\".", + "readOnly": true + }, + "description": { + "type": "string", + "description": "The short, localized friendly description of the operation; suitable for tool tips and detailed views.", + "readOnly": true + } + } + }, + "origin": { + "type": "string", + "description": "The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit logs UX. Default value is \"user,system\"", + "enum": [ + "user", + "system", + "user,system" + ], + "x-ms-enum": { + "name": "Origin", + "modelAsString": true + }, + "readOnly": true + }, + "actionType": { + "type": "string", + "description": "Enum. Indicates the action type. \"Internal\" refers to actions that are for internal only APIs.", + "enum": [ + "Internal" + ], + "x-ms-enum": { + "name": "ActionType", + "modelAsString": true + }, + "readOnly": true + } + } + }, + "OperationListResult": { + "type": "object", + "description": "A list of REST API operations supported by an Azure Resource Provider. It contains an URL link to get the next set of results.", + "properties": { + "value": { + "type": "array", + "description": "List of operations supported by the resource provider", + "items": { + "$ref": "#/definitions/Operation" + }, + "readOnly": true, + "x-ms-identifiers": [ + "name" + ] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "URL to get the next set of operation list results (if there are any).", + "readOnly": true + } + } + }, + "OperationStatusResult": { + "type": "object", + "description": "The current status of an async operation.", + "properties": { + "id": { + "type": "string", + "format": "arm-id", + "description": "Fully qualified ID for the async operation." 
+ }, + "resourceId": { + "type": "string", + "format": "arm-id", + "description": "Fully qualified ID of the resource against which the original async operation was started.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "Name of the async operation." + }, + "status": { + "type": "string", + "description": "Operation status." + }, + "percentComplete": { + "type": "number", + "description": "Percent of the operation that is complete.", + "minimum": 0, + "maximum": 100 + }, + "startTime": { + "type": "string", + "format": "date-time", + "description": "The start time of the operation." + }, + "endTime": { + "type": "string", + "format": "date-time", + "description": "The end time of the operation." + }, + "operations": { + "type": "array", + "description": "The operations list.", + "items": { + "$ref": "#/definitions/OperationStatusResult" + } + }, + "error": { + "$ref": "#/definitions/ErrorDetail", + "description": "If present, details of the operation error." + } + }, + "required": [ + "status" + ] + }, + "Plan": { + "type": "object", + "description": "Plan for the resource.", + "properties": { + "name": { + "type": "string", + "description": "A user defined name of the 3rd Party Artifact that is being procured." + }, + "publisher": { + "type": "string", + "description": "The publisher of the 3rd Party Artifact that is being bought. E.g. NewRelic" + }, + "product": { + "type": "string", + "description": "The 3rd Party artifact that is being procured. E.g. NewRelic. Product maps to the OfferID specified for the artifact at the time of Data Market onboarding. " + }, + "promotionCode": { + "type": "string", + "description": "A publisher provided promotion code as provisioned in Data Market for the said product/artifact." + }, + "version": { + "type": "string", + "description": "The version of the desired product/artifact." 
+ } + }, + "required": [ + "name", + "publisher", + "product" + ] + }, + "ProxyResource": { + "type": "object", + "title": "Proxy Resource", + "description": "The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location", + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "Resource": { + "type": "object", + "title": "Resource", + "description": "Common fields that are returned in the response for all Azure Resource Manager resources", + "properties": { + "id": { + "type": "string", + "format": "arm-id", + "description": "Fully qualified resource ID for the resource. E.g. \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\"", + "readOnly": true + }, + "name": { + "type": "string", + "description": "The name of the resource", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or \"Microsoft.Storage/storageAccounts\"", + "readOnly": true + }, + "systemData": { + "$ref": "#/definitions/systemData", + "description": "Azure Resource Manager metadata containing createdBy and modifiedBy information.", + "readOnly": true + } + }, + "x-ms-azure-resource": true + }, + "ResourceModelWithAllowedPropertySet": { + "type": "object", + "description": "The resource model definition containing the full set of allowed properties for a resource. Except properties bag, there cannot be a top level property outside of this set.", + "properties": { + "managedBy": { + "type": "string", + "description": "The fully qualified resource ID of the resource that manages this resource. Indicates if this resource is managed by another Azure resource. 
If this is present, complete mode deployment will not delete the resource if it is removed from the template since it is managed by another resource.", + "x-ms-mutability": [ + "read", + "create", + "update" + ] + }, + "kind": { + "type": "string", + "description": "Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. E.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the resource provider must validate and persist this value.", + "pattern": "^[-\\w\\._,\\(\\)]+$", + "x-ms-mutability": [ + "read", + "create" + ] + }, + "etag": { + "type": "string", + "description": "The etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal etag convention. Entity tags are used for comparing two or more entities from the same requested resource. HTTP/1.1 uses entity tags in the etag (section 14.19), If-Match (section 14.24), If-None-Match (section 14.26), and If-Range (section 14.27) header fields. ", + "readOnly": true + }, + "identity": { + "allOf": [ + { + "$ref": "#/definitions/Identity" + } + ] + }, + "sku": { + "allOf": [ + { + "$ref": "#/definitions/Sku" + } + ] + }, + "plan": { + "allOf": [ + { + "$ref": "#/definitions/Plan" + } + ] + } + }, + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "x-ms-azure-resource": true + }, + "Sku": { + "type": "object", + "description": "The resource model definition representing SKU", + "properties": { + "name": { + "type": "string", + "description": "The name of the SKU. E.g. P3. It is typically a letter+number code" + }, + "tier": { + "$ref": "#/definitions/SkuTier" + }, + "size": { + "type": "string", + "description": "The SKU size. When the name field is the combination of tier and some other value, this would be the standalone code. 
" + }, + "family": { + "type": "string", + "description": "If the service has different generations of hardware, for the same SKU, then that can be captured here." + }, + "capacity": { + "type": "integer", + "format": "int32", + "description": "If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted." + } + }, + "required": [ + "name" + ] + }, + "SkuTier": { + "type": "string", + "description": "This field is required to be implemented by the Resource Provider if the service has more than one tier, but is not required on a PUT.", + "enum": [ + "Free", + "Basic", + "Standard", + "Premium" + ], + "x-ms-enum": { + "name": "SkuTier", + "modelAsString": false + } + }, + "TrackedResource": { + "type": "object", + "title": "Tracked Resource", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ] + }, + "location": { + "type": "string", + "description": "The geo-location where the resource lives", + "x-ms-mutability": [ + "read", + "create" + ] + } + }, + "required": [ + "location" + ], + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "encryptionProperties": { + "type": "object", + "description": "Configuration of key for data encryption", + "properties": { + "status": { + "type": "string", + "description": "Indicates whether or not the encryption is enabled for container registry.", + "enum": [ + "enabled", + "disabled" + ], + "x-ms-enum": { + "name": "EncryptionStatus", + "modelAsString": true + } + }, + "keyVaultProperties": { + "$ref": "#/definitions/KeyVaultProperties", + "description": "Key vault properties." 
+ } + } + }, + "locationData": { + "type": "object", + "description": "Metadata pertaining to the geographic location of the resource.", + "properties": { + "name": { + "type": "string", + "description": "A canonical name for the geographic or physical location.", + "maxLength": 256 + }, + "city": { + "type": "string", + "description": "The city or locality where the resource is located." + }, + "district": { + "type": "string", + "description": "The district, state, or province where the resource is located." + }, + "countryOrRegion": { + "type": "string", + "description": "The country or region where the resource is located" + } + }, + "required": [ + "name" + ] + }, + "systemData": { + "type": "object", + "description": "Metadata pertaining to creation and last modification of the resource.", + "properties": { + "createdBy": { + "type": "string", + "description": "The identity that created the resource." + }, + "createdByType": { + "type": "string", + "description": "The type of identity that created the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The timestamp of resource creation (UTC)." + }, + "lastModifiedBy": { + "type": "string", + "description": "The identity that last modified the resource." 
+ }, + "lastModifiedByType": { + "type": "string", + "description": "The type of identity that last modified the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "lastModifiedAt": { + "type": "string", + "format": "date-time", + "description": "The timestamp of resource last modification (UTC)" + } + }, + "readOnly": true + } + }, + "parameters": { + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1 + }, + "If-Match": { + "name": "ifMatch", + "in": "header", + "description": "The If-Match header that makes a request conditional.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + }, + "If-None-Match": { + "name": "ifNoneMatch", + "in": "header", + "description": "The If-None-Match header that makes a request conditional.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + }, + "LocationParameter": { + "name": "location", + "in": "path", + "description": "The name of the Azure region.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method" + }, + "ManagementGroupNameParameter": { + "name": "managementGroupName", + "in": "path", + "description": "The name of the management group. The name is case insensitive.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "OperationIdParameter": { + "name": "operationId", + "in": "path", + "description": "The ID of an ongoing async operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method" + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "description": "The name of the resource group. 
The name is case insensitive.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "ScopeParameter": { + "name": "scope", + "in": "path", + "description": "The scope at which the operation is performed.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-skip-url-encoding": true + }, + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "description": "The ID of the target subscription. The value must be an UUID.", + "required": true, + "type": "string", + "format": "uuid" + }, + "TenantIdParameter": { + "name": "tenantId", + "in": "path", + "description": "The Azure tenant ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000)", + "required": true, + "type": "string", + "format": "uuid", + "x-ms-parameter-location": "method" + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/data-plane/readme.md b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/data-plane/readme.md new file mode 100644 index 000000000000..9e353686c29f --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/data-plane/readme.md @@ -0,0 +1,85 @@ +# Contoso.WidgetManager + +> see https://aka.ms/autorest + +This is the AutoRest configuration file for Contoso.WidgetManager. + +## Configuration + +### Basic Information + +This is a TypeSpec project so we only want to readme to default the default tag and point to the outputted swagger file. 
+This is used for some tools such as doc generation and swagger apiview generation it isn't used for SDK code gen as we +use the native TypeSpec code generation configured in the tspconfig.yaml file. + +```yaml +openapi-type: data-plane +tag: package-2022-12-01 +``` + +### Tag: package-2022-12-01 + +These settings apply only when `--tag=package-2022-12-01` is specified on the command line. + +```yaml $(tag) == 'package-2022-12-01' +input-file: + - Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json +``` + +Define the tag twice to test code that handles duplicate tag definitions. + +```yaml $(tag) == 'package-2022-12-01' +input-file: + - Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. + +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Tag: package-2022-11-01-preview + +These settings apply only when `--tag=package-2022-11-01-preview` is specified on the command line. + +```yaml $(tag) == 'package-2022-11-01-preview' +input-file: + - Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. 
+ +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Suppress rules that might be fixed + +These set of linting rules we expect to fixed in typespec-autorest emitter but for now suppressing. +Github issue filed at https://github.com/Azure/typespec-azure/issues/2762 + +```yaml +suppressions: + - code: LroExtension + - code: SchemaTypeAndFormat + - code: PathParameterSchema +``` diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json new file mode 100644 index 000000000000..c51e0e74c52f --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json @@ -0,0 +1,556 @@ +{ + "swagger": "2.0", + "info": { + "title": "Microsoft.Contoso management service", + "version": "2021-10-01-preview", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + }, + { 
+ "name": "Employees" + } + ], + "paths": { + "/providers/Microsoft.Contoso/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + "Operations" + ], + "description": "List the operations for the provider", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Operations_List": { + "$ref": "./examples/Operations_List.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListBySubscription": { + "$ref": "./examples/Employees_ListBySubscription.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + "Employees" + ], + "description": "List Employee resources by resource group", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListByResourceGroup": { + "$ref": "./examples/Employees_ListByResourceGroup.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + 
"description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": "./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + "Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_CreateOrUpdate": { + "$ref": "./examples/Employees_CreateOrUpdate.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Update": { + "$ref": "./examples/Employees_Update.json" + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": 
"../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_CreateOrUpdate.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..9b34209dcd17 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": 
"/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json new file mode 100644 index 000000000000..9ac7910eb3f7 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": 
"11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json new file mode 100644 index 000000000000..3ee7ff5b9c4f --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git 
a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListByResourceGroup.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..f6512d2ba7c2 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListBySubscription.json 
b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListBySubscription.json new file mode 100644 index 000000000000..ffa095dd1d66 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Update.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Update.json new file mode 100644 index 000000000000..f3f85a465653 --- /dev/null +++ 
b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Update.json @@ -0,0 +1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Operations_List.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Operations_List.json new file mode 100644 index 000000000000..6185e205aa43 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": "Operations_List", + "parameters": { + 
"api-version": "2021-10-01-preview" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + "description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json new file mode 100644 index 000000000000..10ffa6631f16 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json @@ -0,0 +1,556 @@ +{ + "swagger": "2.0", + "info": { + "title": "Microsoft.Contoso management service", + "version": "2021-11-01", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + }, + { + "name": "Employees" + } + ], + "paths": { + "/providers/Microsoft.Contoso/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + "Operations" + ], + "description": "List the operations for the provider", + 
"parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Operations_List": { + "$ref": "./examples/Operations_List.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListBySubscription": { + "$ref": "./examples/Employees_ListBySubscription.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + "Employees" + ], + "description": "List Employee resources by resource group", + 
"parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListByResourceGroup": { + "$ref": "./examples/Employees_ListByResourceGroup.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": 
"../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": "./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + "Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_CreateOrUpdate": { + "$ref": "./examples/Employees_CreateOrUpdate.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Update": { + "$ref": "./examples/Employees_Update.json" + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": 
"../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_CreateOrUpdate.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..4a13a329e3b8 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": 
"/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Delete.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Delete.json new file mode 100644 index 000000000000..15176d86b029 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": 
"rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json new file mode 100644 index 000000000000..eb1917859e24 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListByResourceGroup.json 
b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..860fab85a9b8 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListBySubscription.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListBySubscription.json new file mode 100644 index 000000000000..18432d58de37 --- /dev/null +++ 
b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Update.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Update.json new file mode 100644 index 000000000000..de46fc8ef2e8 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Update.json @@ -0,0 +1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + 
"properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json new file mode 100644 index 000000000000..4d74e755c020 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": "Operations_List", + "parameters": { + "api-version": "2021-11-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + "description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git 
a/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/readme.md b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/readme.md new file mode 100644 index 000000000000..21d77042619b --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager/readme.md @@ -0,0 +1,78 @@ +# containerstorage + +> see https://aka.ms/autorest +This is the AutoRest configuration file for Contoso. + +## Getting Started + +To build the SDKs for My API, simply install AutoRest via `npm` (`npm install -g autorest`) and then run: + +> `autorest readme.md` +To see additional help and options, run: + +> `autorest --help` +For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. + +--- + +## Configuration + +### Basic Information + +These are the global settings for the containerstorage. + +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +``` + +### Tag: package-2021-11-01 + +These settings apply only when `--tag=package-2021-11-01` is specified on the command line. + +`$(this-folder)` points to the folder where the readme.md file is located. It +may be used in some specs. In those cases, it can effectively be treated as "." +because the values in input-files are already relative to the current readme.md +file. + +Some files may also have a backslash in the path. Path separators should be +forward slashes, but backslash is supported. + +```yaml $(tag) == 'package-2021-11-01' +input-file: + - $(this-folder)/Microsoft.Contoso/stable/2021-11-01\contoso.json +``` + +### Tag: package-2021-10-01-preview + +These settings apply only when `--tag=package-2021-10-01-preview` is specified on the command line. + +input-file can be a single file or an array of files. Test the single value +scenario. 
+ +```yaml $(tag) == 'package-2021-10-01-preview' +input-file: Microsoft.Contoso/preview/2021-10-01-preview/contoso.json +``` + +### Tag: empty-properties + +This tag has no yaml entites defined and is intended to test parser scenarios +around empty properties. + +``` yaml $(tag) == 'empty-properties' + +``` + +### Tag: no-input-files + +This tag has no input-files and is intended to test parser scenarios around no +input-files. + +``` yaml $(tag) == 'no-input-files' +some-thing: + - some-thing-1 + - some-thing-2 +``` + +--- diff --git a/.github/shared/test/fixtures/getSpecModel/specification/yaml-date-parsing/readme.md b/.github/shared/test/fixtures/getSpecModel/specification/yaml-date-parsing/readme.md new file mode 100644 index 000000000000..b192c9dcd763 --- /dev/null +++ b/.github/shared/test/fixtures/getSpecModel/specification/yaml-date-parsing/readme.md @@ -0,0 +1,29 @@ +# Test date parsing in YAML decoding + +> see https://aka.ms/autorest + +This is the AutoRest configuration file for Contoso.WidgetManager. + +## Configuration + +### Basic Information + +Use a date-only string (leading or following qualifiers like "package", "preview", etc.). The YAML parser should +parse this as a string and not a Date. + +```yaml +openapi-type: data-plane +tag: 2022-12-01 +``` + +### Tag: 2022-12-01 + +These settings apply only when `--tag=package-2022-12-01` is specified on the command line. 
+ +```yaml $(tag) == '2022-12-01' +``` + +### Tag: package-2022-12-01 + +```yaml $(tag) == 'package-2022-12-01' +``` \ No newline at end of file diff --git a/.github/shared/test/logger.test.js b/.github/shared/test/logger.test.js new file mode 100644 index 000000000000..f474d44a12b5 --- /dev/null +++ b/.github/shared/test/logger.test.js @@ -0,0 +1,42 @@ +// @ts-check + +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { ConsoleLogger, debugLogger, defaultLogger } from "../src/logger"; + +describe("logger", () => { + let debugSpy, errorSpy, logSpy; + + beforeEach(() => { + debugSpy = vi.spyOn(console, "debug"); + errorSpy = vi.spyOn(console, "error"); + logSpy = vi.spyOn(console, "log"); + }); + + afterEach(() => { + debugSpy.mockRestore(); + errorSpy.mockRestore(); + logSpy.mockRestore(); + }); + + it.each([ + ["defaultLogger", defaultLogger, false], + ["debugLogger", debugLogger, true], + ["new ConsoleLogger(isDebug: false)", new ConsoleLogger(false), false], + ["new ConsoleLogger(isDebug: true)", new ConsoleLogger(true), true], + ])("%s", (_name, logger, isDebug) => { + expect(logger.isDebug()).toBe(isDebug); + + logger.info("test info"); + expect(logSpy).toBeCalledWith("test info"); + + logger.error("test error"); + expect(errorSpy).toBeCalledWith("test error"); + + logger.debug("test debug"); + if (isDebug) { + expect(debugSpy).toBeCalledWith("test debug"); + } else { + expect(debugSpy).toBeCalledTimes(0); + } + }); +}); diff --git a/.github/shared/test/readme.test.js b/.github/shared/test/readme.test.js new file mode 100644 index 000000000000..4602546d189b --- /dev/null +++ b/.github/shared/test/readme.test.js @@ -0,0 +1,83 @@ +// @ts-check + +import { resolve } from "path"; +import { describe, expect, it } from "vitest"; +import { ConsoleLogger } from "../src/logger.js"; +import { Readme, TagMatchRegex } from "../src/readme.js"; +import { SpecModel } from "../src/spec-model.js"; +import { contosoReadme } from "./examples.js"; 
+ +const options = { logger: new ConsoleLogger(/*debug*/ true) }; + +describe("readme", () => { + it("can be created with mock path", async () => { + const readme = new Readme("bar"); + expect(readme.path).toBe(resolve("bar")); + + await expect(readme.getTags()).rejects.toThrowError(/no such file or directory/i); + + expect(readme.specModel).toBeUndefined(); + }); + + it("resolves path against SpecModel", async () => { + const readme = new Readme("readme.md", { + specModel: new SpecModel("/specs/foo"), + }); + expect(readme.path).toBe(resolve("/specs/foo/readme.md")); + }); + + // TODO: Test that path is resolved against backpointer + + it("can be created with string content", async () => { + const folder = "/fake"; + const readme = new Readme(resolve(folder, "readme.md"), { + ...options, + content: contosoReadme, + }); + + const tags = await readme.getTags(); + const tagNames = [...tags.keys()]; + const expectedTagNames = ["package-2021-11-01", "package-2021-10-01-preview"]; + + expect(tagNames.sort()).toEqual(expectedTagNames.sort()); + + const swaggerPaths = [...tags.values()].flatMap((t) => [...t.inputFiles.keys()]); + + const expectedPaths = [ + resolve(folder, "Microsoft.Contoso/stable/2021-11-01/contoso.json"), + resolve(folder, "Microsoft.Contoso/preview/2021-10-01-preview/contoso.json"), + ]; + + expect(swaggerPaths.sort()).toEqual(expectedPaths.sort()); + }); + + it("can be created with empty content", async () => { + const folder = "/fake"; + const readme = new Readme(resolve(folder, "readme.md"), { + ...options, + // Simulate empty file + content: "", + }); + + const tags = await readme.getTags(); + + // Ensures code doesn't try to read file `/fake/readme.md` which would throw + expect(tags.size).toBe(0); + }); +}); + +describe("TagMatchRegex", () => { + it.each([ + ["```yaml $(package-A-tag) == 'package-A-[[Version]]'", false, undefined], + ["``` yaml $(tag)=='package-2017-03' && $(go)", true, "package-2017-03"], + ["``` yaml $(csharp) && $(tag) == 
'release_4_0'", true, "release_4_0"], + ["``` yaml $(tag) == 'package-2021-12-01-preview'", true, "package-2021-12-01-preview"], + ['``` yaml $(tag) == "package-2025-06-05"', true, "package-2025-06-05"], + ["``` yaml $(tag) == 'package-2025-06-05\"", false, undefined], + ["``` yaml $(tag) == \"package-2025-06-05'", false, undefined], + ])("matches tags and extracts tag names properly: %s", (example, expectedMatch, expectedTag) => { + const match = example.match(TagMatchRegex); + expect(TagMatchRegex.test(example)).toEqual(expectedMatch); + expect(match?.[2]).toEqual(expectedTag); + }); +}); diff --git a/.github/shared/test/repo.js b/.github/shared/test/repo.js new file mode 100644 index 000000000000..c8de7692fad1 --- /dev/null +++ b/.github/shared/test/repo.js @@ -0,0 +1,8 @@ +// @ts-check + +import { dirname, join } from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +export const repoRoot = join(__dirname, "..", "..", ".."); diff --git a/.github/shared/test/sleep.test.js b/.github/shared/test/sleep.test.js new file mode 100644 index 000000000000..e0fecc52566d --- /dev/null +++ b/.github/shared/test/sleep.test.js @@ -0,0 +1,10 @@ +// @ts-check + +import { describe, expect, it } from "vitest"; +import { sleep } from "../src/sleep.js"; + +describe("sleep", () => { + it.each([-1, 0, 1, 2])("sleep(%s)", async (ms) => { + await expect(sleep(ms)).resolves.toBeUndefined(); + }); +}); diff --git a/.github/shared/test/sort.test.js b/.github/shared/test/sort.test.js new file mode 100644 index 000000000000..17db2445643a --- /dev/null +++ b/.github/shared/test/sort.test.js @@ -0,0 +1,35 @@ +// @ts-check + +import { describe, expect, it } from "vitest"; +import { byDate, invert } from "../src/sort.js"; + +describe("byDate", () => { + const input = [{ foo: "2025-01-01" }, { foo: "2023-01-01" }, { foo: "2024-01-01" }]; + + it("ascending by default", () => { + input.sort(byDate((s) => 
s.foo)); + + // Value `undefined` always sorts to the end + expect(input).toEqual([{ foo: "2023-01-01" }, { foo: "2024-01-01" }, { foo: "2025-01-01" }]); + }); + + it("descending with invert()", () => { + input.sort(invert(byDate((s) => s.foo))); + + // Value `undefined` always sorts to the end + expect(input).toEqual([{ foo: "2025-01-01" }, { foo: "2024-01-01" }, { foo: "2023-01-01" }]); + }); + + it.each([null, undefined, "invalid"])("invalid input: %s", (i) => { + /** @type {{foo: string | null | undefined}[]} */ + const input = [{ foo: "2025-01-01" }, { foo: "2024-01-01" }]; + const comparator = byDate((i) => i.foo); + + // Ensure base case doesn't throw + input.sort(comparator); + expect(input).toEqual([{ foo: "2024-01-01" }, { foo: "2025-01-01" }]); + + input[0].foo = i; + expect(() => input.sort(comparator)).toThrowError(`Unable to parse '${i}' to a valid date`); + }); +}); diff --git a/.github/shared/test/spec-model-error.test.js b/.github/shared/test/spec-model-error.test.js new file mode 100644 index 000000000000..3afe37a030bd --- /dev/null +++ b/.github/shared/test/spec-model-error.test.js @@ -0,0 +1,32 @@ +// @ts-check + +import { describe, expect, it } from "vitest"; +import { SpecModelError } from "../src/spec-model-error.js"; + +describe("SpecModelError", () => { + it("toString`", () => { + let error = new SpecModelError("test message"); + expect(error.toString()).toMatchInlineSnapshot(`"SpecModelError: test message"`); + + error.source = "/test/source.json"; + expect(error.toString()).toMatchInlineSnapshot(` + "SpecModelError: test message + Problem File: /test/source.json" + `); + + error.readme = "/test/readme.md"; + expect(error.toString()).toMatchInlineSnapshot(` + "SpecModelError: test message + Problem File: /test/source.json + Readme: /test/readme.md" + `); + + error.tag = "2025-01-01"; + expect(error.toString()).toMatchInlineSnapshot(` + "SpecModelError: test message + Problem File: /test/source.json + Readme: /test/readme.md + Tag: 
2025-01-01" + `); + }); +}); diff --git a/.github/shared/test/spec-model.test.js b/.github/shared/test/spec-model.test.js new file mode 100644 index 000000000000..3520b372b018 --- /dev/null +++ b/.github/shared/test/spec-model.test.js @@ -0,0 +1,356 @@ +// @ts-check + +import { readdir } from "fs/promises"; +import { dirname, isAbsolute, join, resolve } from "path"; +import { describe, expect, it } from "vitest"; +import { mapAsync } from "../src/array.js"; +import { ConsoleLogger } from "../src/logger.js"; +import { SpecModel } from "../src/spec-model.js"; +import { repoRoot } from "./repo.js"; + +const options = { logger: new ConsoleLogger(/*debug*/ true) }; + +describe("SpecModel", () => { + it("can be created with mock folder", async () => { + const specModel = new SpecModel("foo"); + expect(specModel.folder).toBe(resolve("foo")); + + await expect(specModel.getReadmes()).rejects.toThrowError(/no such file or directory/i); + }); + + it("returns spec model", async () => { + const folder = resolve( + __dirname, + "fixtures/getSpecModel/specification/contosowidgetmanager/resource-manager", + ); + + const specModel = new SpecModel(folder, options); + + expect(specModel.toString()).toContain("SpecModel"); + expect(specModel.folder).toBe(folder); + + const readmes = [...(await specModel.getReadmes()).values()]; + expect(readmes.length).toBe(1); + + const readme = readmes[0]; + expect(readme.toString()).toContain("Readme"); + expect(readme.path).toBe(resolve(folder, "readme.md")); + expect(readme.specModel).toBe(specModel); + + expect(readme.getGlobalConfig()).resolves.toEqual({ + "openapi-type": "arm", + "openapi-subtype": "rpaas", + tag: "package-2021-11-01", + }); + + const tags = [...(await readme.getTags()).values()].sort((a, b) => + a.name.localeCompare(b.name), + ); + expect(tags.length).toBe(2); + + expect(tags[0].toString()).toContain("Tag"); + expect(tags[0].name).toBe("package-2021-10-01-preview"); + expect(tags[0].readme).toBe(readme); + + const inputFiles0 
= [...tags[0].inputFiles.values()]; + expect(inputFiles0.length).toBe(1); + expect(inputFiles0[0].toString()).toContain("Swagger"); + expect(inputFiles0[0].path).toBe( + resolve(folder, "Microsoft.Contoso/preview/2021-10-01-preview/contoso.json"), + ); + expect(inputFiles0[0].tag).toBe(tags[0]); + + const refs0 = [...(await inputFiles0[0].getRefs()).values()].sort((a, b) => + a.path.localeCompare(b.path), + ); + + expect(refs0.length).toBe(1); + + expect(refs0[0].path).toBe( + resolve( + dirname(inputFiles0[0].path), + "../../../../../common-types/resource-management/v5/types.json", + ), + ); + expect(refs0[0].tag).toBe(tags[0]); + + expect(tags[1].name).toBe("package-2021-11-01"); + const inputFiles1 = [...tags[1].inputFiles.values()]; + expect(inputFiles1.length).toBe(1); + expect(inputFiles1[0].path).toBe( + resolve(folder, "Microsoft.Contoso/stable/2021-11-01/contoso.json"), + ); + expect(inputFiles1[0].tag).toBe(tags[1]); + + const jsonDefault = await specModel.toJSONAsync(); + const readmePathDefault = jsonDefault.readmes[0].path; + expect(isAbsolute(readmePathDefault)).toBe(true); + expect(jsonDefault.readmes[0].tags[0].inputFiles[0].refs).toBeUndefined(); + + const jsonRefsRelative = await specModel.toJSONAsync({ + includeRefs: true, + relativePaths: true, + }); + const readmePathRelative = jsonRefsRelative.readmes[0].path; + expect(isAbsolute(readmePathRelative)).toBe(false); + expect(jsonRefsRelative.readmes[0].tags[0].inputFiles[0].refs).toBeDefined(); + }); + + it("uses strings for tag names and doesn't parse Date object", async () => { + const folder = resolve(__dirname, "fixtures/getSpecModel/specification/yaml-date-parsing"); + + const specModel = new SpecModel(folder, options); + + const readme = [...(await specModel.getReadmes()).values()][0]; + + const globalConfig = await readme.getGlobalConfig(); + + const tag = globalConfig["tag"]; + + // @ts-ignore + expect(tag).not.toBeTypeOf(Date); + + expect(tag).toBeTypeOf("string"); + 
expect(tag).toBe("2022-12-01"); + }); + + it("throws when a tag is defined more than once", async () => { + // data-plane/readme.md defines the `package-2022-12-01` tag twice. + const folder = resolve( + __dirname, + "fixtures/getSpecModel/specification/contosowidgetmanager/data-plane", + ); + + const specModel = new SpecModel(folder, options); + + const readmes = await specModel.getReadmes(); + + await expect( + mapAsync([...readmes.values()], async (r) => await r.getTags()), + ).rejects.toThrowError(/multiple.*tag/i); + }); + + describe("getAffectedReadmeTags", () => { + it("returns affected readme tags", async () => { + const folder = resolve( + __dirname, + "fixtures/getAffectedReadmeTags/specification/contosowidgetmanager", + ); + + const specModel = new SpecModel(folder, options); + + const swaggerPath = resolve( + folder, + "resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", + ); + + const affectedReadmeTags = await specModel.getAffectedReadmeTags(swaggerPath); + + expect(affectedReadmeTags.size).toBe(1); + + const readmePath = [...affectedReadmeTags.keys()][0]; + expect(readmePath).toBe(resolve(folder, "resource-manager/readme.md")); + + const tagNames = [...[...affectedReadmeTags.values()][0].keys()]; + expect(tagNames.length).toBe(1); + + expect(tagNames[0]).toBe("package-2021-11-01"); + }); + + it("returns affected readme tags for multiple tags", async () => { + const folder = resolve(__dirname, "fixtures/getAffectedSwaggers/specification/1"); + + const specModel = new SpecModel(folder, options); + + const swaggerPath = resolve(folder, "data-plane/shared/shared.json"); + + const affectedReadmeTags = await specModel.getAffectedReadmeTags(swaggerPath); + + expect(affectedReadmeTags.size).toBe(1); + + const readmePath = [...affectedReadmeTags.keys()][0]; + expect(readmePath).toBe(resolve(folder, "data-plane/readme.md")); + + const tagNames = [...[...affectedReadmeTags.values()][0].keys()].sort(); + + expect(tagNames.length).toBe(2); + 
expect(tagNames[0]).toBe("tag-1"); + expect(tagNames[1]).toBe("tag-2"); + }); + + it("throws when an input-file is not found", async () => { + const folder = resolve( + __dirname, + "fixtures/getAffectedReadmeTags/specification/input-file-not-found", + ); + const specModel = new SpecModel(folder, options); + + expect( + specModel.getAffectedReadmeTags(resolve(folder, "data-plane/a.json")), + ).rejects.toThrowError(/Failed to resolve file for swagger/i); + }); + + it("throws when an input-file is invalid JSON", async () => { + const folder = resolve( + __dirname, + "fixtures/getAffectedReadmeTags/specification/input-file-invalid-json", + ); + const specModel = new SpecModel(folder, options); + + expect( + specModel.getAffectedReadmeTags(resolve(folder, "data-plane/a.json")), + ).rejects.toThrowError(/is not a valid JSON Schema/i); + }); + }); + + describe("getAffectedSwaggers", async () => { + const folder = resolve(__dirname, "fixtures/getAffectedSwaggers/specification/1"); + + const specModel = new SpecModel(folder, options); + + it("returns directly referenced swagger", async () => { + const swaggerPath = resolve(folder, "data-plane/a.json"); + + const actual = [...(await specModel.getAffectedSwaggers(swaggerPath)).keys()].sort(); + + const expected = ["data-plane/a.json"].map((p) => resolve(folder, p)).sort(); + + expect(actual).toEqual(expected); + }); + + it("throws when swagger file is not found", async () => { + const swaggerPath = resolve(folder, "data-plane/not-found.json"); + + await expect(specModel.getAffectedSwaggers(swaggerPath)).rejects.toThrowError( + /no affected swaggers/i, + ); + }); + + it("returns correct swaggers for one layer of dependencies", async () => { + const swaggerPath = resolve(folder, "data-plane/nesting/b.json"); + + const actual = [...(await specModel.getAffectedSwaggers(swaggerPath)).keys()].sort(); + + const expected = ["data-plane/a.json", "data-plane/nesting/b.json"] + .map((p) => resolve(folder, p)) + .sort(); + + 
expect(actual).toEqual(expected); + }); + + it("returns correct swaggers for two layers of dependencies", async () => { + const swaggerPath = resolve(folder, "data-plane/c.json"); + + const actual = [...(await specModel.getAffectedSwaggers(swaggerPath)).keys()].sort(); + + const expected = ["data-plane/a.json", "data-plane/nesting/b.json", "data-plane/c.json"] + .map((p) => resolve(folder, p)) + .sort(); + + expect(actual).toEqual(expected); + }); + + it("returns correct swaggers for three layers of dependencies", async () => { + const swaggerPath = resolve(folder, "data-plane/d.json"); + + const actual = [...(await specModel.getAffectedSwaggers(swaggerPath)).keys()].sort(); + + const expected = [ + "data-plane/a.json", + "data-plane/nesting/b.json", + "data-plane/c.json", + "data-plane/d.json", + ] + .map((p) => resolve(folder, p)) + .sort(); + + expect(actual).toEqual(expected); + }); + + it("returns correctly for multiple shared dependencies", async () => { + const swaggerPath = resolve(folder, "data-plane/shared/shared.json"); + + const actual = [...(await specModel.getAffectedSwaggers(swaggerPath)).keys()].sort(); + + const expected = [ + "data-plane/a.json", + "data-plane/nesting/b.json", + "data-plane/c.json", + "data-plane/d.json", + "data-plane/shared/shared.json", + "data-plane/e.json", + ] + .map((p) => resolve(folder, p)) + .sort(); + + expect(actual).toEqual(expected); + }); + }); +}); + +// TODO: Update tests for new object-oriented API + +// Stress test the parser against all specs in the specification/ folder. This +// is a long-running test and should be run manually. To run this test, remove +// the '.skip' from the describe block. Put '.skip' back in when done or this +// test may fail unexpectedly in the future. 
+describe.skip("Parse readmes", () => { + it( + "Does not produce exceptions", + { timeout: 30 * 60 * 1000 /* 30 minutes */ }, + async ({ expect }) => { + const excludeFolders = [ + "authorization", // specification/authorization/resource-manager/readme.md defines has duplicate tags including 'package-2020-10-01' + "azureactivedirectory", // specification/azureactivedirectory/resource-manager/readme.md has duplicate tags including 'package-preview-2020-07' + "cost-management", // specification/cost-management/resource-manager/readme.md has duplicate tags including 'package-2019-01' + "migrate", // specification/migrate/resource-manager/readme.md has duplicate tags including 'package-migrate-2023-04' + "quota", // specification/quota/resource-manager/readme.md has duplicate tags including 'package-2023-02-01' + "redisenterprise", // specification/redisenterprise/resource-manager/readme.md has duplicate tags including 'package-2024-02' + "security", // specification/security/resource-manager/readme.md has duplicate tags including 'package-2021-07-preview-only' + "confidentialledger", // data-plane/readme.md tag 'package-2022-04-20-preview-ledger' points to a swagger file that doesn't exist + "network", // network takes a long time to evaluate + "servicenetworking", // servicenetworking includes a swagger file which references a file that doesn't exist + ]; + + // List all folders under specification/ + const folders = await readdir(join(repoRoot, "specification"), { + withFileTypes: true, + }); + const services = folders + .filter((f) => f.isDirectory() && !excludeFolders.includes(f.name)) + .map((f) => f.name); + for (const folder of services) { + // Folders are listed in alphabetical order, when running this function + // iteratively over all service folders, a value can be placed in in this + // condition to skip folders that appear before a given folder. This means + // you won't have to wait for tests to run over all folders that have + // previously passed. 
+ if (folder < "000") { + console.log(`Skipping service: ${folder}`); + continue; + } + + console.log(`Testing service: ${folder}`); + const specModel = new SpecModel(`specification/${folder}`, options); + + expect(specModel).toBeDefined(); + } + }, + ); + + it( + "runs properly against specific services", + { timeout: 30 * 60 * 1000 /* 30 minutes */ }, + async ({ expect }) => { + const folders = [ + // Fill in services to test here + ]; + for (const folder of folders) { + console.log(`Testing service: ${folder}`); + const specModel = new SpecModel(`specification/${folder}`, options); + + expect(specModel).toBeDefined(); + } + }, + ); +}); diff --git a/.github/shared/test/swagger.test.js b/.github/shared/test/swagger.test.js new file mode 100644 index 000000000000..42c025d268b7 --- /dev/null +++ b/.github/shared/test/swagger.test.js @@ -0,0 +1,50 @@ +// @ts-check + +import { dirname, resolve } from "path"; +import { describe, expect, it } from "vitest"; +import { Swagger } from "../src/swagger.js"; + +import { fileURLToPath } from "url"; +import { Readme } from "../src/readme.js"; +import { Tag } from "../src/tag.js"; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe("Swagger", () => { + it("can be created with mock path", async () => { + const swagger = new Swagger("bar"); + expect(swagger.path).toBe(resolve("bar")); + expect(swagger.tag).toBeUndefined(); + + await expect(swagger.getRefs()).rejects.toThrowError(/Failed to resolve file for swagger/i); + }); + + it("resolves path against Tag.readme", async () => { + const readme = new Readme("/specs/foo/readme.md"); + const tag = new Tag("2025-01-01", [], { readme }); + const swagger = new Swagger("test.json", { tag }); + + expect(swagger.path).toBe(resolve("/specs/foo/test.json")); + }); + + // TODO: Test that path is resolved against backpointer + + it("excludes example files", async () => { + const swagger = new Swagger(resolve(__dirname, "fixtures/Swagger/ignoreExamples/swagger.json")); + 
const refs = await swagger.getRefs(); + + const expectedIncludedPath = resolve( + __dirname, + "fixtures/Swagger/ignoreExamples/included.json", + ); + expect(refs).toMatchObject( + new Map([ + [ + expectedIncludedPath, + expect.objectContaining({ + path: expect.stringContaining(expectedIncludedPath), + }), + ], + ]), + ); + }); +}); diff --git a/.github/shared/test/tag.test.js b/.github/shared/test/tag.test.js new file mode 100644 index 000000000000..ec1d02b363c6 --- /dev/null +++ b/.github/shared/test/tag.test.js @@ -0,0 +1,21 @@ +// @ts-check + +import { describe, expect, it } from "vitest"; + +import { resolve } from "path"; +import { Tag } from "../src/tag.js"; + +describe("Tag", () => { + it("can be created with mock swaggers", async () => { + const tag = new Tag("tag", ["swagger"]); + + expect(tag.name).toBe("tag"); + expect(tag.readme).toBeUndefined(); + + expect(tag.inputFiles.size).toBe(1); + const swagger = [...tag.inputFiles.values()][0]; + expect(swagger.path).toBe(resolve("swagger")); + + await expect(swagger.getRefs()).rejects.toThrowError(/Failed to resolve file for swagger/i); + }); +}); diff --git a/.github/shared/tsconfig.json b/.github/shared/tsconfig.json new file mode 100644 index 000000000000..fd8d4c43ea86 --- /dev/null +++ b/.github/shared/tsconfig.json @@ -0,0 +1,15 @@ +// Only used for type-checking of JavaScript sources. Folder should contain no TypeScript sources. +{ + "extends": "@tsconfig/node20/tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + // Avoid generating tsconfig.tsbuildinfo file, which isn't useful for our scenario + "incremental": false, + "noEmit": true, + }, + "include": [ + // Only check runtime sources. Tests currently have too many errors. 
+ "**/src/**/*.js", + ], +} diff --git a/.github/shared/vitest.config.js b/.github/shared/vitest.config.js new file mode 100644 index 000000000000..d9fa13bfbfba --- /dev/null +++ b/.github/shared/vitest.config.js @@ -0,0 +1,20 @@ +import { configDefaults, defineConfig } from "vitest/config"; + +export default defineConfig({ + esbuild: { + // Ignore tsconfig.json, since it's only used for type checking, and causes + // a warning if vitest tries to load it + tsConfig: false, + }, + + test: { + coverage: { + exclude: [ + ...configDefaults.coverage.exclude, + + // Not worth testing CLI code + "cmd/**/*.js", + ], + }, + }, +}); diff --git a/.github/src/changed-files.js b/.github/src/changed-files.js deleted file mode 100644 index f5e2046e3782..000000000000 --- a/.github/src/changed-files.js +++ /dev/null @@ -1,123 +0,0 @@ -// @ts-check - -import { diff } from "./git.js"; - -/** - * @param {Object} [options] - * @param {string} [options.baseCommitish] Default: "HEAD^". - * @param {string} [options.cwd] Current working directory. Default: process.cwd(). - * @param {string} [options.headCommitish] Default: "HEAD". - * @param {import('./types.js').ILogger} [options.logger] - * @returns {Promise} List of changed files, using posix paths, relative to options.cwd. Example: ["specification/foo/Microsoft.Foo/main.tsp"]. - */ -export async function getChangedFiles(options = {}) { - const { - baseCommitish = "HEAD^", - cwd, - headCommitish = "HEAD", - logger, - } = options; - - // TODO: If we need to filter based on status, instead of passing an argument to `--diff-filter, - // consider using "--name-status" instead of "--name-only", and return an array of objects like - // { name: "/foo/baz.js", status: Status.Renamed, previousName: "/foo/bar.js"}. - // Then add filter functions to filter based on status. This is more flexible and lets consumers - // filter based on status with a single call to `git diff`. 
- const result = await diff(baseCommitish, headCommitish, { - args: "--name-only", - cwd, - logger: logger, - }); - - const files = result.trim().split("\n"); - - logger?.info("Changed Files:"); - for (const file of files) { - logger?.info(` ${file}`); - } - logger?.info(""); - - return files; -} - -// Functions suitable for passing to string[].filter(), ordered roughly in order of increasing specificity - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function json(file) { - // Extension "json" with any case is a valid JSON file - return typeof file === "string" && file.toLowerCase().endsWith(".json"); -} - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function readme(file) { - // Filename "readme.md" with any case is a valid README file - return typeof file === "string" && file.toLowerCase().endsWith("readme.md"); -} - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function specification(file) { - // Folder name "specification" should match case, since it already exists in repo - return typeof file === "string" && file.startsWith("specification/"); -} - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function dataPlane(file) { - // Folder name "data-plane" should match case for consistency across specs - return ( - typeof file === "string" && - specification(file) && - file.includes("/data-plane/") - ); -} - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function resourceManager(file) { - // Folder name "resource-manager" should match case for consistency across specs - return ( - typeof file === "string" && - specification(file) && - file.includes("/resource-manager/") - ); -} - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function example(file) { - // Folder name "examples" should match case for consistency across specs - return ( - typeof file === "string" && - json(file) && - specification(file) && - file.includes("/examples/") - ); 
-} - -/** - * @param {string} [file] - * @returns {boolean} - */ -export function swagger(file) { - return ( - typeof file === "string" && - json(file) && - (dataPlane(file) || resourceManager(file)) && - !example(file) - ); -} diff --git a/.github/src/exec.js b/.github/src/exec.js deleted file mode 100644 index 5ec47d9a0ce3..000000000000 --- a/.github/src/exec.js +++ /dev/null @@ -1,55 +0,0 @@ -// @ts-check - -import child_process from "child_process"; -import { promisify } from "util"; -const execImpl = promisify(child_process.exec); - -/** - * @param {string} command - * @param {Object} [options] - * @param {string} [options.cwd] Current working directory. Default: process.cwd(). - * @param {import('./types.js').ILogger} [options.logger] - * @param {number} [options.maxBuffer] - */ -export async function exec(command, options = {}) { - const { - cwd, - logger, - // Node default is 1024 * 1024, which is too small for some git commands returning many entities or large file content. - // To support "git show", should be larger than the largest swagger file in the repo (2.5 MB as of 2/28/2025). - maxBuffer = 16 * 1024 * 1024, - } = options; - - logger?.info(`exec("${command}")`); - - // TODO: Handle errors - const result = await execImpl(command, { - cwd, - maxBuffer, - }); - - logger?.debug(`stdout: '${result.stdout}'`); - logger?.debug(`stderr: '${result.stderr}'`); - - return result.stdout; -} - -/** - * Joins a list of arguments to build a command-line without extra spaces. - * Ignores null, undefined, and elements that convert to empty or all-whitespace. 
- * - * @param {any[]} args - * @returns string - */ -export function buildCmd(...args) { - return ( - args - // Exclude null and undefined - .filter((arg) => arg !== null && arg !== undefined) - // Convert to string - .map((arg) => String(arg)) - // Exclude empty and all-whitespace - .filter((str) => str.trim() !== "") - .join(" ") - ); -} diff --git a/.github/src/git.js b/.github/src/git.js deleted file mode 100644 index f0f1c5ba79c6..000000000000 --- a/.github/src/git.js +++ /dev/null @@ -1,76 +0,0 @@ -// @ts-check - -import { buildCmd, exec } from "./exec.js"; - -/** - * @typedef {import('./types.js').ILogger} ILogger - */ - -/** - * @param {string} baseCommitish - * @param {string} headCommitish - * @param {Object} [options] - * @param {string} [options.args] - * @param {string} [options.cwd] Current working directory. Default: process.cwd(). - * @param {ILogger} [options.logger] - * @returns {Promise} - */ -export async function diff(baseCommitish, headCommitish, options = {}) { - const { args, cwd, logger } = options; - - const cmd = buildCmd("diff", args, baseCommitish, headCommitish); - - return await execGit(cmd, { cwd, logger }); -} - -/** - * @param {string} treeIsh - * @param {string} path - * @param {Object} [options] - * @param {string} [options.args] - * @param {string} [options.cwd] Current working directory. Default process.cwd(). - * @param {ILogger} [options.logger] - * @returns {Promise} - */ -export async function lsTree(treeIsh, path, options = {}) { - const { args, cwd, logger } = options; - - const cmd = buildCmd("ls-tree", args, treeIsh, path); - - return await execGit(cmd, { cwd, logger }); -} - -/** - * @param {string} treeIsh - * @param {string} path - * @param {Object} [options] - * @param {string} [options.args] - * @param {string} [options.cwd] Current working directory. Default: process.cwd(). 
- * @param {ILogger} [options.logger] - * @returns {Promise} - */ -export async function show(treeIsh, path, options = {}) { - const { args, cwd, logger } = options; - - const cmd = buildCmd("show", args, `${treeIsh}:${path}`); - - return await execGit(cmd, { cwd, logger }); -} - -/** - * @param {string} args - * @param {Object} [options] - * @param {string} [options.cwd] Current working directory. Default: process.cwd(). - * @param {ILogger} [options.logger] - * @returns {Promise} - */ -async function execGit(args, options = {}) { - const { cwd, logger } = options; - - // Ensure that git displays filenames as they are (without escaping) - const defaultConfig = "-c core.quotepath=off"; - - const cmd = buildCmd("git", defaultConfig, args); - - return await exec(cmd, { cwd, logger }); -} diff --git a/.github/src/readme.js b/.github/src/readme.js deleted file mode 100644 index 8a00c24f3ba1..000000000000 --- a/.github/src/readme.js +++ /dev/null @@ -1,36 +0,0 @@ -// @ts-check - -import yaml from "js-yaml"; -import { marked } from "marked"; - -/** - * @param {string} markdown - * @param {Object} [options] - * @param {import('./types.js').ILogger} [options.logger] - * @returns {Promise>} All input files for all tags - */ -export async function getInputFiles(markdown, options = {}) { - const { logger } = options; - - const tokens = marked.lexer(markdown); - - const yamlBlocks = tokens - .filter((token) => token.type === "code") - .map((token) => /** @type import("marked").Tokens.Code */ (token)) - // Include default block and tagged blocks (```yaml $(tag) == 'package-2021-11-01') - .filter((token) => token.lang?.toLowerCase().startsWith("yaml")); - - const inputFiles = yamlBlocks.flatMap((block) => { - const tag = - block.lang?.match(/yaml \$\(tag\) == '([^']*)'/)?.[1] || "default"; - - const obj = /** @type {any} */ (yaml.load(block.text)); - const blockFiles = /** @type string[] */ (obj["input-file"] || []); - - logger?.info(`Input files for tag '${tag}': 
${JSON.stringify(blockFiles)}`); - - return blockFiles; - }); - - return new Set(inputFiles); -} diff --git a/.github/src/types.js b/.github/src/types.js deleted file mode 100644 index 7d55ac238c9f..000000000000 --- a/.github/src/types.js +++ /dev/null @@ -1,11 +0,0 @@ -// @ts-check - -/** - * @typedef {Object} ILogger - * @property {(message:string) => void} debug - * @property {(message:string) => void} info - * @property {() => boolean} isDebug - */ - -// Empty export to make valid module -export {}; diff --git a/.github/test/changed-files.test.js b/.github/test/changed-files.test.js deleted file mode 100644 index 2122b3e5fffb..000000000000 --- a/.github/test/changed-files.test.js +++ /dev/null @@ -1,114 +0,0 @@ -import { describe, expect, it, vi } from "vitest"; -import { - dataPlane, - example, - getChangedFiles, - json, - readme, - resourceManager, - specification, - swagger, -} from "../src/changed-files.js"; -import * as git from "../src/git.js"; -import { createMockLogger } from "./mocks.js"; - -describe("changedFiles", () => { - it.each([{}, { logger: createMockLogger() }])( - `getChangedFiles(%o)`, - async (options) => { - const files = [ - ".github/src/git.js", - "specification/contosowidgetmanager/Contoso.Management/main.tsp", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", - ]; - - vi.spyOn(git, "diff").mockResolvedValue(files.join("\n")); - - await expect(getChangedFiles(options)).resolves.toEqual(files); - }, - ); - - const files = [ - "cspell.json", - "cspell.yaml", - "MixedCase.jSoN", - "README.MD", - "specification/contosowidgetmanager/data-plane/readme.md", - "specification/contosowidgetmanager/Contoso.Management/main.tsp", - "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", - 
"specification/contosowidgetmanager/resource-manager/readme.md", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", - ]; - - it("filter:json", () => { - const expected = [ - "cspell.json", - "MixedCase.jSoN", - "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", - ]; - - expect(files.filter(json)).toEqual(expected); - }); - - it("filter:readme", () => { - const expected = [ - "README.MD", - "specification/contosowidgetmanager/data-plane/readme.md", - "specification/contosowidgetmanager/resource-manager/readme.md", - ]; - - expect(files.filter(readme)).toEqual(expected); - }); - - it("filter:specification", () => { - const expected = [ - "specification/contosowidgetmanager/data-plane/readme.md", - "specification/contosowidgetmanager/Contoso.Management/main.tsp", - "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", - "specification/contosowidgetmanager/resource-manager/readme.md", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", - ]; - - expect(files.filter(specification)).toEqual(expected); - }); - - it("filter:data-plane", () => { - const expected = [ - "specification/contosowidgetmanager/data-plane/readme.md", - ]; - - expect(files.filter(dataPlane)).toEqual(expected); - }); - - it("filter:resource-manager", () => { - const expected = [ - "specification/contosowidgetmanager/resource-manager/readme.md", - 
"specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", - ]; - - expect(files.filter(resourceManager)).toEqual(expected); - }); - - it("filter:example", () => { - const expected = [ - "specification/contosowidgetmanager/Contoso.Management/examples/2021-11-01/Employees_Get.json", - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Employees_Get.json", - ]; - - expect(files.filter(example)).toEqual(expected); - }); - - it("filter:swagger", () => { - const expected = [ - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json", - ]; - - expect(files.filter(swagger)).toEqual(expected); - }); -}); diff --git a/.github/test/examples.js b/.github/test/examples.js deleted file mode 100644 index 14c415c61ad2..000000000000 --- a/.github/test/examples.js +++ /dev/null @@ -1,60 +0,0 @@ -// @ts-check - -export const swaggerHandWritten = JSON.stringify("foo"); - -export const swaggerTypeSpecGenerated = JSON.stringify({ - info: { - "x-typespec-generated": [{ emitter: "@azure-tools/typespec-autorest" }], - }, -}); - -export const contosoReadme = ` -# contosowidgetmanager - -> see https://aka.ms/autorest -This is the AutoRest configuration file for Contoso. - -## Getting Started - -To build the SDKs for My API, simply install AutoRest via \`npm\` (\`npm install -g autorest\`) and then run: - -> \`autorest readme.md\` -To see additional help and options, run: - -> \`autorest --help\` -For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. - ---- - -## Configuration - -### Basic Information - -These are the global settings for the containerstorage. 
- -\`\`\`yaml -openapi-type: arm -openapi-subtype: rpaas -tag: package-2021-11-01 -\`\`\` - -### Tag: package-2021-11-01 - -These settings apply only when \`--tag=package-2021-11-01\` is specified on the command line. - -\`\`\`yaml $(tag) == 'package-2021-11-01' -input-file: - - Microsoft.Contoso/stable/2021-11-01/contoso.json -\`\`\` - -### Tag: package-2021-10-01-preview - -These settings apply only when \`--tag=package-2021-10-01-preview\` is specified on the command line. - -\`\`\`yaml $(tag) == 'package-2021-10-01-preview' -input-file: - - Microsoft.Contoso/preview/2021-10-01-preview/contoso.json -\`\`\` - ---- -`; diff --git a/.github/test/exec.test.js b/.github/test/exec.test.js deleted file mode 100644 index d2199237d2dc..000000000000 --- a/.github/test/exec.test.js +++ /dev/null @@ -1,65 +0,0 @@ -import { EOL } from "os"; -import { describe, expect, it } from "vitest"; -import { buildCmd, exec } from "../src/exec.js"; -import { createMockLogger } from "./mocks.js"; - -describe("exec", () => { - const str = "test"; - const cmd = `echo ${str}`; - const expected = `${str}${EOL}`; - - it.each([{}, { logger: createMockLogger() }])( - "exec succeeds with default buffer (options: %o)", - async (options) => { - await expect(exec(cmd, options)).resolves.toEqual(expected); - }, - ); - - it("exec succeeds with exact-sized buffer", async () => { - await expect(exec(cmd, { maxBuffer: expected.length })).resolves.toEqual( - expected, - ); - }); - - it("exec fails with too-small buffer", async () => { - await expect( - exec(cmd, { maxBuffer: expected.length - 1 }), - ).rejects.toThrowError( - expect.objectContaining({ code: "ERR_CHILD_PROCESS_STDIO_MAXBUFFER" }), - ); - }); - - it("buildCmd", () => { - const cmd = buildCmd( - "foo", - // Excluded - null, - // Arg is *not* trimmed, so trailing whitespace is preserved - "spaceAfter ", - // Excluded since all-whitespace - " \t", - // Excluded - undefined, - // Arg is *not* trimmed, so leading whitespace is preserved - " 
spaceBefore", - // Non-string values, both truthy and falsy - true, - false, - -1, - 0, - 1, - "", - NaN, - // Invalid, but we currently don't exclude it, so test it - {}, - // Converts to "", so excluded as empty string - [], - // Converts to string - ["arrayElement1", "arrayElement2"], - ["newArray"], - ); - expect(cmd).toBe( - "foo spaceAfter spaceBefore true false -1 0 1 NaN [object Object] arrayElement1,arrayElement2 newArray", - ); - }); -}); diff --git a/.github/test/git.test.js b/.github/test/git.test.js deleted file mode 100644 index 4bc85d9493ed..000000000000 --- a/.github/test/git.test.js +++ /dev/null @@ -1,67 +0,0 @@ -// @ts-check - -import { describe, expect, it, vi } from "vitest"; -import * as exec from "../src/exec.js"; -import { diff, lsTree, show } from "../src/git.js"; - -describe("git", () => { - describe("e2e", () => { - it("diff", async () => { - await expect(diff("HEAD", "HEAD")).resolves.toBe(""); - }); - - it("lsTree", async () => { - // lsTree always uses "\n" in output, even on windows - const expected = ".github\n"; - - await expect( - lsTree("HEAD", ".github", { args: "--full-tree --name-only" }), - ).resolves.toBe(expected); - }); - - it("show", async () => { - await expect(show("HEAD", ".github/package.json")).resolves.toContain( - "scripts", - ); - }); - }); - - describe("mocked", () => { - it("diff", async () => { - const execSpy = vi.spyOn(exec, "exec").mockResolvedValue("test diff"); - - await expect(diff("HEAD^", "HEAD")).resolves.toBe("test diff"); - - expect(execSpy).toBeCalledWith( - "git -c core.quotepath=off diff HEAD^ HEAD", - expect.anything(), - ); - }); - - it("lsTree", async () => { - const execSpy = vi.spyOn(exec, "exec").mockResolvedValue("test lstree"); - - await expect( - lsTree("HEAD", "specification/contosowidgetmanager"), - ).resolves.toBe("test lstree"); - - expect(execSpy).toBeCalledWith( - "git -c core.quotepath=off ls-tree HEAD specification/contosowidgetmanager", - expect.anything(), - ); - }); - - 
it("show", async () => { - const execSpy = vi.spyOn(exec, "exec").mockResolvedValue("test show"); - - await expect( - show("HEAD", "specification/contosowidgetmanager/cspell.yaml"), - ).resolves.toBe("test show"); - - expect(execSpy).toBeCalledWith( - "git -c core.quotepath=off show HEAD:specification/contosowidgetmanager/cspell.yaml", - expect.anything(), - ); - }); - }); -}); diff --git a/.github/test/mocks.js b/.github/test/mocks.js deleted file mode 100644 index 621fd750bce4..000000000000 --- a/.github/test/mocks.js +++ /dev/null @@ -1,14 +0,0 @@ -// @ts-check - -import { vi } from "vitest"; - -/** - * @returns {import('../src/types.js').ILogger} - */ -export function createMockLogger() { - return { - debug: vi.fn(), - info: vi.fn(), - isDebug: vi.fn().mockReturnValue(false), - }; -} diff --git a/.github/test/readme.test.js b/.github/test/readme.test.js deleted file mode 100644 index e0a6e5bf5641..000000000000 --- a/.github/test/readme.test.js +++ /dev/null @@ -1,14 +0,0 @@ -import { describe, expect, it } from "vitest"; -import { getInputFiles } from "../src/readme.js"; -import { contosoReadme } from "./examples.js"; - -describe("readme", () => { - it("getInputFiles", async () => { - await expect(getInputFiles(contosoReadme)).resolves.toEqual( - new Set([ - "Microsoft.Contoso/stable/2021-11-01/contoso.json", - "Microsoft.Contoso/preview/2021-10-01-preview/contoso.json", - ]), - ); - }); -}); diff --git a/.github/test/types.test.js b/.github/test/types.test.js deleted file mode 100644 index 0f87b4f13102..000000000000 --- a/.github/test/types.test.js +++ /dev/null @@ -1,9 +0,0 @@ -import { describe, expect, it } from "vitest"; -import * as types from "../src/types.js"; - -describe("types", () => { - // Added primarily to get 100% code coverage, but also good to enforce - it("has no exports", async () => { - expect(Object.keys(types)).toEqual([]); - }); -}); diff --git a/.github/tsconfig.json b/.github/tsconfig.json index 429bb29460da..fd8d4c43ea86 100644 --- 
a/.github/tsconfig.json +++ b/.github/tsconfig.json @@ -10,6 +10,6 @@ }, "include": [ // Only check runtime sources. Tests currently have too many errors. - "**/src/**/*.js" - ] + "**/src/**/*.js", + ], } diff --git a/.github/vitest.config.js b/.github/vitest.config.js new file mode 100644 index 000000000000..ae56b27cb71c --- /dev/null +++ b/.github/vitest.config.js @@ -0,0 +1,26 @@ +import { configDefaults, defineConfig } from "vitest/config"; + +export default defineConfig({ + esbuild: { + // Ignore tsconfig.json, since it's only used for type checking, and causes + // a warning if vitest tries to load it + tsConfig: false, + }, + + test: { + coverage: { + exclude: [ + ...configDefaults.coverage.exclude, + + // Not worth testing CLI code + "**/cmd/**", + + // Ignore all coverage folders + "**/coverage/**", + + // Ignore all test folders + "**/test/**", + ], + }, + }, +}); diff --git a/.github/workflows/SDK-Suppressions-Label.yaml b/.github/workflows/SDK-Suppressions-Label.yaml index e62f580ea5a6..88f5511d1c6f 100644 --- a/.github/workflows/SDK-Suppressions-Label.yaml +++ b/.github/workflows/SDK-Suppressions-Label.yaml @@ -24,18 +24,6 @@ jobs: - name: Setup Node and install deps uses: ./.github/actions/setup-node-install-deps - - name: Get GitHub PullRequest Changed Files - shell: pwsh - id: get-changedFiles - run: | - . 
eng/scripts/ChangedFiles-Functions.ps1 - $changedFiles = @(Get-ChangedFiles) - $changedFilesArray = $changedFiles -split ' ' - $yamlFilesArray = $changedFilesArray | Where-Object { $_ -match 'sdk-suppressions\.yaml$' } - $yamlFiles = $yamlFilesArray -join ' ' - echo "PR Changed sdk-suppressions.yaml files: $yamlFiles" - Add-Content -Path $env:GITHUB_OUTPUT -Value "changedFiles=$yamlFiles" - - name: Get GitHub PullRequest Context uses: actions/github-script@v7 id: fetch-pullRequest-context @@ -45,18 +33,17 @@ jobs: if (!pr) { throw new Error("This workflow must run in the context of a pull request."); } - console.log("This action trigger by ", context.eventName); + console.log("This action trigger by", context.eventName); core.setOutput("prLabels", pr.labels.map(label => label.name)); result-encoding: string - - name: Run Get suppressions label script + - name: Run Get suppressions Label Script id: run-suppressions-script env: OUTPUT_FILE: "output.json" - GITHUB_PULL_REQUEST_CHANGE_FILES: ${{ steps.get-changedFiles.outputs.changedFiles }} GITHUB_PULL_REQUEST_LABELS: ${{ steps.fetch-pullRequest-context.outputs.prLabels }} run: | - node eng/tools/sdk-suppressions/cmd/sdk-suppressions-label.js HEAD^ HEAD "$GITHUB_PULL_REQUEST_CHANGE_FILES" "$GITHUB_PULL_REQUEST_LABELS" + node eng/tools/sdk-suppressions/cmd/sdk-suppressions-label.js HEAD^ HEAD "$GITHUB_PULL_REQUEST_LABELS" OUTPUT=$(cat $OUTPUT_FILE) echo "Script output labels: $OUTPUT" diff --git a/.github/workflows/_reusable-eng-tools-test.yaml b/.github/workflows/_reusable-eng-tools-test.yaml index c87bd60c1dad..02c135d7b1ec 100644 --- a/.github/workflows/_reusable-eng-tools-test.yaml +++ b/.github/workflows/_reusable-eng-tools-test.yaml @@ -15,10 +15,6 @@ on: description: Run 'npm run lint' if true required: false type: boolean - prettier: - description: Run 'npm run prettier' if true - required: false - type: boolean permissions: contents: read @@ -54,21 +50,24 @@ jobs: with: node-version: ${{ matrix.node-version 
}}.x - - run: npm run build + - name: Build + run: npm run build shell: pwsh working-directory: ./eng/tools/${{ inputs.package }} - - run: npm run lint + - name: Lint + run: npm run lint if: inputs.lint == true shell: pwsh working-directory: ./eng/tools/${{ inputs.package }} - - run: npm run prettier - if: inputs.prettier == true + - name: Check Formatting + run: npm run format:check:ci shell: pwsh working-directory: ./eng/tools/${{ inputs.package }} - - run: npm run test:ci + - name: Test + run: npm run test:ci shell: pwsh working-directory: ./eng/tools/${{ inputs.package }} diff --git a/.github/workflows/_reusable-set-check-status.yml b/.github/workflows/_reusable-set-check-status.yml new file mode 100644 index 000000000000..6589ccc1199d --- /dev/null +++ b/.github/workflows/_reusable-set-check-status.yml @@ -0,0 +1,68 @@ +# Prefix with "~" to sort last in Actions list +name: ~Templates - Set Run Status + +on: + workflow_call: + inputs: + monitored_workflow_name: + description: Name of the workflow to monitor + required: true + type: string + required_check_name: + description: Name of the required check to update + required: true + type: string + overriding_label: + description: Comma-separated list of labels that, when any is set, causes the check to always pass + required: true + type: string + +permissions: + actions: read + contents: read + issues: read + pull-requests: read + statuses: write + +jobs: + set-status: + name: "${{ inputs.required_check_name }} - Set Status" + + if: | + github.event_name == 'workflow_run' || + (github.event_name == 'pull_request_target' && + ((github.event.action == 'opened' || + github.event.action == 'synchronize' || + github.event.action == 'reopened') || + ((github.event.action == 'labeled' || github.event.action == 'unlabeled') && + inputs.overriding_label && contains(inputs.overriding_label, github.event.label.name)))) + + runs-on: ubuntu-24.04 + + steps: + # *** IMPORTANT *** + # For workflows that are triggered by the 
pull_request_target event, the workflow runs in the + # context of the base of the pull request. You should make sure that you do not check out, + # build, or run untrusted code from the head of the pull request. + - uses: actions/checkout@v4 + with: + # Only needs .github folder for automation, not the files in the PR (analyzed in a + # separate workflow). + # + # Uses the .github folder from the PR base branch (pull_request_target trigger), + # or the repo default branch (other triggers). + sparse-checkout: | + .github + + - name: "Set Status" + uses: actions/github-script@v7 + with: + script: | + const { default: setStatus } = + await import('${{ github.workspace }}/.github/workflows/src/set-status.js'); + return await setStatus( + { github, context, core }, + '${{ inputs.monitored_workflow_name }}', + '${{ inputs.required_check_name }}', + '${{ inputs.overriding_label }}' + ); diff --git a/.github/workflows/_reusable-verify-run-status.yaml b/.github/workflows/_reusable-verify-run-status.yaml new file mode 100644 index 000000000000..ab08c8788506 --- /dev/null +++ b/.github/workflows/_reusable-verify-run-status.yaml @@ -0,0 +1,47 @@ +# Prefix with "~" to sort last in Actions list +name: ~Templates - Verify Run Status + +on: + workflow_call: + inputs: + check_run_name: + description: Name of the check run to verify + required: true + type: string + commit_status_name: + description: Name of the commit status to verify + type: string + workflow_name: + description: Name of the workflow to verify + type: string + +permissions: + checks: read + contents: read + +jobs: + check-run-status: + # Skip the workflow_run conclusion of 'action_required'. This conclusion + # happens when a workflow is triggered by a PR submitted by a user who does + # not have correct permissions. 
+ if: | + (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'action_required') || + (github.event_name == 'check_suite' && github.event.check_suite.app.name == 'openapi-pipeline-app') || + (github.event_name == 'check_run' && github.event.check_run.name == inputs.check_run_name) + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: | + .github + + - name: Verify matching status + uses: actions/github-script@v7 + with: + script: | + const { verifyRunStatus } = await import('${{ github.workspace }}/.github/workflows/src/verify-run-status.js'); + return await verifyRunStatus({ github, context, core }); + env: + CHECK_RUN_NAME: ${{ inputs.check_run_name }} + COMMIT_STATUS_NAME: ${{ inputs.commit_status_name }} + WORKFLOW_NAME: ${{ inputs.workflow_name }} diff --git a/.github/workflows/avocado-code.yml b/.github/workflows/avocado-code.yml new file mode 100644 index 000000000000..f14a7bc69ef1 --- /dev/null +++ b/.github/workflows/avocado-code.yml @@ -0,0 +1,41 @@ +name: "[TEST-IGNORE] Swagger Avocado - Analyze Code" + +on: pull_request + +permissions: + contents: read + +jobs: + avocado-code: + name: "[TEST-IGNORE] Swagger Avocado - Analyze Code" + + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v4 + with: + # Must include all branches, for git branch logic in Avocado to work correctly + fetch-depth: 0 + + - name: Setup Node and install deps + uses: ./.github/actions/setup-node-install-deps + + - name: Run Avocado + run: | + npm exec --no -- avocado \ + --excludePaths \ + "/common-types/" \ + "/scenarios/" \ + "/package.json" \ + "/package-lock.json" \ + "/cadl/examples/" \ + '(?=/examples/)(?!(?:/stable/|/preview/))' \ + "/\\.github/" \ + "/eng/" \ + --includePaths \ + "data-plane" \ + "resource-manager" + env: + # Tells Avocado to analyze the files changed between the PR head (default checkout) + # and the PR base branch. 
+ SYSTEM_PULLREQUEST_TARGETBRANCH: ${{ github.event.pull_request.base.ref }} diff --git a/.github/workflows/avocado-status.yaml b/.github/workflows/avocado-status.yaml new file mode 100644 index 000000000000..6a0d00913f38 --- /dev/null +++ b/.github/workflows/avocado-status.yaml @@ -0,0 +1,35 @@ +name: "[TEST-IGNORE] Swagger Avocado - Set Status" + +on: + # Must run on pull_request_target instead of pull_request, since the latter cannot trigger on + # labels from bot accounts in fork PRs. pull_request_target is also more similar to the other + # trigger "workflow_run" -- they are both privileged and run in the target branch and repo -- + # which simplifies implementation. + pull_request_target: + types: + # Run workflow on default types, to update status as quickly as possible. + - opened + - synchronize + - reopened + # Depends on labels, so must re-evaluate whenever a relevant label is manually added or removed. + - labeled + - unlabeled + workflow_run: + workflows: ["\\[TEST-IGNORE\\] Swagger Avocado - Analyze Code"] + types: [completed] + +permissions: + actions: read + contents: read + issues: read + pull-requests: read + statuses: write + +jobs: + avocado-status: + name: Set Avocado Status + uses: ./.github/workflows/_reusable-set-check-status.yml + with: + monitored_workflow_name: "[TEST-IGNORE] Swagger Avocado - Analyze Code" + required_check_name: "[TEST-IGNORE] Swagger Avocado" + overriding_label: "Approved-Avocado" diff --git a/.github/workflows/breaking-change-code.yaml b/.github/workflows/breaking-change-code.yaml new file mode 100644 index 000000000000..d92a6ab6307f --- /dev/null +++ b/.github/workflows/breaking-change-code.yaml @@ -0,0 +1,69 @@ +name: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code" + +on: pull_request + +permissions: + contents: read + +jobs: + validateBreakingChange: + name: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code" + runs-on: ubuntu-24.04 + + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + 
fetch-depth: 0 + + - name: Setup Node and install deps + uses: ./.github/actions/setup-node-install-deps + + - name: Setup .NET 6 SDK + uses: actions/setup-dotnet@v4 + with: + dotnet-version: "6.0.x" + + - name: Swagger Breaking Change - Analyze Code + id: swagger-breaking-change-analyze-code + run: | + npm exec --no -- openapi-diff-runner \ + --srp "${{ github.workspace }}" \ + --number "${{ github.event.pull_request.number }}" \ + --sb "${{ github.event.pull_request.head.ref }}" \ + --tb "${{ github.event.pull_request.base.ref }}" \ + --hc "${{ github.event.pull_request.head.sha }}" \ + --repo "${{ github.repository }}" + + # Upload artifact for 'BreakingChangeReviewRequired' label + - if: | + always() && + (steps.swagger-breaking-change-analyze-code.outputs.breakingChangeReviewLabelName != '') + name: Upload artifact with BreakingChangeReviewRequiredLabel label + uses: ./.github/actions/add-label-artifact + with: + name: "${{ steps.swagger-breaking-change-analyze-code.outputs.breakingChangeReviewLabelName }}" + value: "${{ steps.swagger-breaking-change-analyze-code.outputs.breakingChangeReviewLabelValue == 'true' }}" + + # Upload artifact for 'VersioningReviewRequired' label + - if: | + always() && + (steps.swagger-breaking-change-analyze-code.outputs.versioningReviewLabelName != '') + name: Upload artifact with VersioningReviewRequiredLabel label + uses: ./.github/actions/add-label-artifact + with: + name: "${{ steps.swagger-breaking-change-analyze-code.outputs.versioningReviewLabelName }}" + # Convert "add/remove" to "true/false" + value: "${{ steps.swagger-breaking-change-analyze-code.outputs.versioningReviewLabelValue == 'true' }}" + + # Upload artifact with issue number if labels are present and PR number is valid + - if: | + always() && + (steps.swagger-breaking-change-analyze-code.outputs.breakingChangeReviewLabelName != '' || + steps.swagger-breaking-change-analyze-code.outputs.versioningReviewLabelName != '') && + github.event.pull_request.number > 0 + 
name: Upload artifact with issue number + uses: ./.github/actions/add-empty-artifact + with: + name: "issue-number" + value: "${{ github.event.pull_request.number }}" diff --git a/.github/workflows/breaking-change-status.yaml b/.github/workflows/breaking-change-status.yaml new file mode 100644 index 000000000000..62bbef9f75f9 --- /dev/null +++ b/.github/workflows/breaking-change-status.yaml @@ -0,0 +1,35 @@ +name: "[TEST-IGNORE] Swagger BreakingChange - Set Status" + +on: + # Must run on pull_request_target instead of pull_request, since the latter cannot trigger on + # labels from bot accounts in fork PRs. pull_request_target is also more similar to the other + # trigger "workflow_run" -- they are both privileged and run in the target branch and repo -- + # which simplifies implementation. + pull_request_target: + types: + # Run workflow on default types, to update status as quickly as possible. + - opened + - synchronize + - reopened + # Depends on labels, so must re-evaluate whenever a relevant label is manually added or removed. 
+ - labeled + - unlabeled + workflow_run: + workflows: ["\\[TEST-IGNORE\\] Swagger BreakingChange - Analyze Code"] + types: [completed] + +permissions: + actions: read + contents: read + issues: read + pull-requests: read + statuses: write + +jobs: + breaking-change-status: + name: Set BreakingChange Status + uses: ./.github/workflows/_reusable-set-check-status.yml + with: + monitored_workflow_name: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code" + required_check_name: "[TEST-IGNORE] Swagger BreakingChange" + overriding_label: "BreakingChange-Approved-Benign,BreakingChange-Approved-BugFix,BreakingChange-Approved-UserImpact,BreakingChange-Approved-BranchPolicyException,BreakingChange-Approved-Previously,BreakingChange-Approved-Security,Versioning-Approved-Benign,Versioning-Approved-BugFix,Versioning-Approved-PrivatePreview,Versioning-Approved-BranchPolicyException,Versioning-Approved-Previously,Versioning-Approved-Retired" diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index c94984173eb7..40e9c913f851 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -13,17 +13,17 @@ name: "CodeQL Advanced" on: push: - branches: [ "main", "release-*" ] + branches: ["main", "release-*"] paths: - - .github/** - - eng/tools/** + - .github/** + - eng/tools/** pull_request: - branches: [ "main", "release-*" ] + branches: ["main", "release-*"] paths: - - .github/** - - eng/tools/** + - .github/** + - eng/tools/** schedule: - - cron: '27 4 * * 1' + - cron: "27 4 * * 1" jobs: analyze: @@ -49,21 +49,21 @@ jobs: fail-fast: false matrix: include: - - language: actions - config: | - paths: - - .github - sparse-checkout: | - .github - - language: javascript-typescript - build-mode: none - config: | - paths: - - .github - - eng/tools - sparse-checkout: | - .github - eng/tools + - language: actions + config: | + paths: + - .github + sparse-checkout: | + .github + - language: javascript-typescript + build-mode: none + config: | + 
paths: + - .github + - eng/tools + sparse-checkout: | + .github + eng/tools # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' # Use `c-cpp` to analyze code written in C, C++ or both @@ -74,48 +74,48 @@ jobs: # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - sparse-checkout: ${{ matrix.sparse-checkout }} + - name: Checkout repository + uses: actions/checkout@v4 + with: + sparse-checkout: ${{ matrix.sparse-checkout }} - # Add any setup steps before running the `github/codeql-action/init` action. - # This includes steps like installing compilers or runtimes (`actions/setup-node` - # or others). This is typically only required for manual builds. - # - name: Setup runtime (example) - # uses: actions/setup-example@v1 + # Add any setup steps before running the `github/codeql-action/init` action. + # This includes steps like installing compilers or runtimes (`actions/setup-node` + # or others). This is typically only required for manual builds. + # - name: Setup runtime (example) + # uses: actions/setup-example@v1 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - build-mode: ${{ matrix.build-mode }} - config: ${{ matrix.config }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. + # Initializes the CodeQL tools for scanning. 
+ - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + config: ${{ matrix.config }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. - # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality - # If the analyze step fails for one of the languages you are analyzing with - # "We were unable to automatically build your code", modify the matrix above - # to set the build mode to "manual" for that language. Then modify this step - # to build your code. - # ℹ️ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - if: matrix.build-mode == 'manual' - shell: bash - run: | - echo 'If you are using a "manual" build mode for one or more of the' \ - 'languages you are analyzing, replace this with the commands to build' \ - 'your code, for example:' - echo ' make bootstrap' - echo ' make release' - exit 1 + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. 
Then modify this step + # to build your code. + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + shell: bash + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:${{matrix.language}}" + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml new file mode 100644 index 000000000000..840384194b83 --- /dev/null +++ b/.github/workflows/copilot-setup-steps.yml @@ -0,0 +1,24 @@ +name: Copilot Setup Steps + +on: workflow_dispatch + +jobs: + copilot-setup-steps: + runs-on: ubuntu-latest + + permissions: + contents: read + checks: read + statuses: write + id-token: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Azure Login with Workload Identity Federation + uses: azure/login@v2 + with: + client-id: "936c56f0-298b-467f-b702-3ad5bf4b15c1" + tenant-id: "72f988bf-86f1-41af-91ab-2d7cd011db47" + allow-no-subscriptions: true diff --git a/.github/workflows/github-test.yaml b/.github/workflows/github-test.yaml index 504879b56cda..846608423fc2 100644 --- a/.github/workflows/github-test.yaml +++ b/.github/workflows/github-test.yaml @@ -14,18 +14,19 @@ on: permissions: contents: read -defaults: - run: - working-directory: ./.github - jobs: test: strategy: matrix: + folder: [.github, .github/shared] os: [ubuntu, windows] runs-on: ${{ fromJSON('{"ubuntu":"ubuntu-24.04", "windows":"windows-2022"}')[matrix.os] }} + defaults: + run: 
+ working-directory: ./${{ matrix.folder }} + steps: - name: Checkout uses: actions/checkout@v4 @@ -33,15 +34,17 @@ jobs: sparse-checkout: | .github - - name: Setup Node 20 and install runtime deps + - if: ${{ matrix.folder == '.github' }} + name: Setup Node 20 and install runtime deps uses: ./.github/actions/setup-node-install-deps with: # actions/github-script@v7 uses Node 20 node-version: 20.x install-command: npm ci --omit dev - working-directory: ./.github + working-directory: ./${{ matrix.folder }} - - name: Verify all modules are importable + - if: ${{ matrix.folder == '.github' }} + name: Verify all modules are importable uses: actions/github-script@v7 with: script: | @@ -56,17 +59,17 @@ jobs: with: # actions/github-script@v7 uses Node 20 node-version: 20.x - working-directory: ./.github + working-directory: ./${{ matrix.folder }} - run: npm run lint - - run: npm run prettier + - run: npm run format:check:ci - run: npm run test:ci - name: Archive code coverage results uses: actions/upload-artifact@v4 with: - name: code-coverage-report-${{ matrix.os }} - path: ./.github/coverage + name: code-coverage-report-${{ matrix.os }}-${{ fromJSON('{".github":"github", ".github/shared":"github-shared"}')[matrix.folder] }} + path: ./${{ matrix.folder }}/coverage if-no-files-found: ignore diff --git a/.github/workflows/lintdiff-code.yaml b/.github/workflows/lintdiff-code.yaml new file mode 100644 index 000000000000..edd328ff1d26 --- /dev/null +++ b/.github/workflows/lintdiff-code.yaml @@ -0,0 +1,65 @@ +name: "Swagger LintDiff - Analyze Code" + +on: pull_request + +permissions: + contents: read + +jobs: + lintdiff: + name: "Swagger LintDiff - Analyze Code" + runs-on: ubuntu-24.04 + + steps: + - name: Checkout eng + uses: actions/checkout@v4 + with: + sparse-checkout: | + eng/ + .github/ + + - name: Checkout 'after' state + uses: actions/checkout@v4 + with: + fetch-depth: 2 + path: after + + - name: Checkout 'before' state + uses: actions/checkout@v4 + with: + ref: ${{ 
github.event.pull_request.base.sha }} + path: before + + - name: Setup Node and install deps + uses: ./.github/actions/setup-node-install-deps + + - name: Get changed files + uses: actions/github-script@v7 + with: + script: | + const { getChangedFiles } = await import('${{ github.workspace }}/.github/shared/src/changed-files.js'); + const { writeFileSync } = await import('fs'); + const { join } = await import('path'); + + // TODO: Logger + const changedFiles = await getChangedFiles({ cwd: 'after'}); + console.log('Changed files:', changedFiles); + + const filePath = join(process.cwd(), 'changed-files.txt'); + writeFileSync(filePath, changedFiles.join('\n'), 'utf8'); + console.log(`Changed files written to ${filePath}`); + + # TODO: Could be github.sha for merge commit + - name: Run LintDiff + run: | + npm exec --no -- lint-diff \ + --before before \ + --after after \ + --changed-files-path changed-files.txt \ + --base-branch ${{ github.event.pull_request.base.ref }} \ + --compare-sha ${{ github.event.pull_request.head.sha }} \ + --out-file $GITHUB_STEP_SUMMARY + env: + # Some LintDiff runs are memory intensive and require more than the + # default. + NODE_OPTIONS: "--max-old-space-size=8192" diff --git a/.github/workflows/lintdiff-status.yaml b/.github/workflows/lintdiff-status.yaml new file mode 100644 index 000000000000..da90f5348c9d --- /dev/null +++ b/.github/workflows/lintdiff-status.yaml @@ -0,0 +1,35 @@ +name: "Swagger LintDiff - Set Status" + +on: + # Must run on pull_request_target instead of pull_request, since the latter cannot trigger on + # labels from bot accounts in fork PRs. pull_request_target is also more similar to the other + # trigger "workflow_run" -- they are both privileged and run in the target branch and repo -- + # which simplifies implementation. + pull_request_target: + types: + # Run workflow on default types, to update status as quickly as possible. 
+ - opened + - synchronize + - reopened + # Depends on labels, so must re-evaluate whenever a relevant label is manually added or removed. + - labeled + - unlabeled + workflow_run: + workflows: ["Swagger LintDiff - Analyze Code"] + types: [completed] + +permissions: + actions: read + contents: read + issues: read + pull-requests: read + statuses: write + +jobs: + lintdiff-status: + name: Set LintDiff Status + uses: ./.github/workflows/_reusable-set-check-status.yml + with: + monitored_workflow_name: "Swagger LintDiff - Analyze Code" + required_check_name: "Swagger LintDiff" + overriding_label: "Approved-LintDiff" diff --git a/.github/workflows/lintdiff-test.yaml b/.github/workflows/lintdiff-test.yaml index 442bdd847f0c..fa0a09658b4a 100644 --- a/.github/workflows/lintdiff-test.yaml +++ b/.github/workflows/lintdiff-test.yaml @@ -9,6 +9,7 @@ on: - package-lock.json - package.json - tsconfig.json + - .github/shared - .github/workflows/_reusable-eng-tools-test.yaml - .github/workflows/lintdiff-test.yaml - eng/tools/package.json @@ -31,4 +32,3 @@ jobs: with: package: lint-diff lint: false - prettier: false diff --git a/.github/workflows/lintdiff.yaml b/.github/workflows/lintdiff.yaml deleted file mode 100644 index 576a33c2496b..000000000000 --- a/.github/workflows/lintdiff.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: "[TEST-IGNORE] Swagger LintDiff" - -on: pull_request - -permissions: - contents: read - -jobs: - lintdiff: - name: "[TEST-IGNORE] Swagger LintDiff" - runs-on: ubuntu-24.04 - - steps: - - name: Checkout eng - uses: actions/checkout@v4 - with: - sparse-checkout: | - eng/ - .github/ - - - name: Checkout 'after' state - uses: actions/checkout@v4 - with: - fetch-depth: 2 - path: after - - - name: Checkout 'before' state - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.base.sha }} - path: before - - - name: Setup Node and install deps - uses: ./.github/actions/setup-node-install-deps - - # TODO: This can probably be updated to run directly 
from JS - # TODO: default workspace is the after/ folder - - name: Get changed files - run: | - ."./eng/scripts/ChangedFiles-Functions.ps1" - Set-Location ./after - Get-ChangedFiles | Set-Content -Path ../changed-files.txt - Get-Location - Get-Content ../changed-files.txt - shell: pwsh - - - name: Prepend test notice to summary - if: always() - run: | - temp_file=$(mktemp) - echo -e "> [!IMPORTANT]\n> This check is testing a new version of 'Swagger LintDiff'.\n> Failures are expected, and should be completely ignored by spec authors and reviewers.\n> Meaningful results for this PR are are in required check 'Swagger LintDiff'.\n" > $GITHUB_STEP_SUMMARY - - # TODO: Could be github.sha for merge commit - - name: Run LintDiff - run: | - npm exec --no -- lint-diff \ - --before before \ - --after after \ - --changed-files-path changed-files.txt \ - --base-branch ${{ github.event.pull_request.base.ref }} \ - --compare-sha ${{ github.event.pull_request.head.sha }} \ - --out-file $GITHUB_STEP_SUMMARY - - echo "⚠️ This check is testing a new version of 'Swagger LintDiff'." - echo "⚠️ Failures are expected, and should be completely ignored by spec authors and reviewers." - echo "⚠️ Meaningful results for this PR are are in required check 'Swagger LintDiff'." 
diff --git a/.github/workflows/oav-runner-tests.yaml b/.github/workflows/oav-runner-tests.yaml new file mode 100644 index 000000000000..21c02ef9d49e --- /dev/null +++ b/.github/workflows/oav-runner-tests.yaml @@ -0,0 +1,29 @@ +name: OAV Runner - Tests + +on: + push: + branches: + - main + pull_request: + paths: + - package-lock.json + - package.json + - tsconfig.json + - .github/shared + - .github/workflows/_reusable-eng-tools-test.yaml + - .github/workflows/oav-runner-test.yaml + - eng/tools/package.json + - eng/tools/tsconfig.json + - eng/tools/oav-runner/** + workflow_dispatch: + +permissions: + contents: read + +jobs: + oavrunnertests: + name: Check OAV Runner + uses: ./.github/workflows/_reusable-eng-tools-test.yaml + with: + package: oav-runner + lint: false diff --git a/.github/workflows/openapi-diff-runner-test.yaml b/.github/workflows/openapi-diff-runner-test.yaml new file mode 100644 index 000000000000..f78a78f72414 --- /dev/null +++ b/.github/workflows/openapi-diff-runner-test.yaml @@ -0,0 +1,28 @@ +name: openapi-diff-runner - Test + +on: + push: + branches: + - main + pull_request: + paths: + - package-lock.json + - package.json + - tsconfig.json + - .github/workflows/_reusable-eng-tools-test.yaml + - .github/workflows/openapi-diff-runner-test.yaml + - eng/tools/package.json + - eng/tools/tsconfig.json + - eng/tools/openapi-diff-runner/** + workflow_dispatch: + +permissions: + contents: read + +jobs: + openapiDiffRunner: + name: openapi-diff-runner + uses: ./.github/workflows/_reusable-eng-tools-test.yaml + with: + package: openapi-diff-runner + lint: false diff --git a/.github/workflows/post-apiview.yml b/.github/workflows/post-apiview.yml new file mode 100644 index 000000000000..1f08b8c4bf12 --- /dev/null +++ b/.github/workflows/post-apiview.yml @@ -0,0 +1,29 @@ +name: After APIView + +on: + check_run: + types: [completed] + +permissions: + pull-requests: write + contents: read + +jobs: + post-apiview: + name: After APIView + runs-on: ubuntu-24.04 + 
if: | + github.event.check_run.check_suite.app.name == 'Azure Pipelines' && ( + contains(github.event.check_run.name, 'APIView') || + contains(github.event.check_run.name, 'SDK Validation') ) + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + sparse-checkout: "eng/common" + + - name: Create APIView Comment on PR + run: | + . "eng/common/scripts/Helpers/ApiView-Helpers.ps1" + Set-ApiViewCommentForRelatedIssues -HeadCommitish ${{ github.event.check_run.head_sha }} -AuthToken ${{ secrets.GITHUB_TOKEN }} + shell: pwsh diff --git a/.github/workflows/sdk-breaking-change-labels.yaml b/.github/workflows/sdk-breaking-change-labels.yaml index 7f3887a2506f..013a43ee8dab 100644 --- a/.github/workflows/sdk-breaking-change-labels.yaml +++ b/.github/workflows/sdk-breaking-change-labels.yaml @@ -1,4 +1,4 @@ -name: SDK Breaking Change Labels (Preview) +name: SDK Breaking Change Labels on: check_run: @@ -6,18 +6,39 @@ on: permissions: contents: read + pull-requests: read + id-token: write jobs: sdk-breaking-change-labels: - if: ${{ contains(github.event.check_run.name, 'SDK Generation') && github.event.check_run.check_suite.app.name == 'Azure Pipelines'}} - name: SDK Breaking Change Labels (Preview) + # Only run this job when the check run is 'SDK Validation *' + if: | + github.event.check_run.check_suite.app.name == 'Azure Pipelines' && + contains(github.event.check_run.name, 'SDK Validation') + name: SDK Breaking Change Labels runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 with: sparse-checkout: | .github - + + # Only run the login and get token steps when the respository is azure-rest-api-specs-pr + - if: github.event.repository.name == 'azure-rest-api-specs-pr' + name: Azure Login with Workload Identity Federation + uses: azure/login@v2 + with: + client-id: "936c56f0-298b-467f-b702-3ad5bf4b15c1" + tenant-id: "72f988bf-86f1-41af-91ab-2d7cd011db47" + allow-no-subscriptions: true + + - if: github.event.repository.name == 'azure-rest-api-specs-pr' + name: 
Get ADO Token via Managed Identity + run: | + # Get token for Azure DevOps resource + ADO_TOKEN=$(az account get-access-token --resource "499b84ac-1321-427f-aa17-267ca6975798" --query "accessToken" -o tsv) + echo "ADO_TOKEN=$ADO_TOKEN" >> $GITHUB_ENV + - name: Get label and action id: get-label-and-action uses: actions/github-script@v7 @@ -38,8 +59,9 @@ jobs: value: "${{ fromJson(steps.get-label-and-action.outputs.result).labelAction == 'add' }}" - if: | - (fromJson(steps.get-label-and-action.outputs.result).labelAction == 'add' || - fromJson(steps.get-label-and-action.outputs.result).labelAction == 'remove') + ((fromJson(steps.get-label-and-action.outputs.result).labelAction == 'add' || + fromJson(steps.get-label-and-action.outputs.result).labelAction == 'remove') && + fromJson(steps.get-label-and-action.outputs.result).issueNumber > 0) name: Upload artifact with issue number uses: ./.github/actions/add-empty-artifact with: diff --git a/.github/workflows/specs-model-test.yaml b/.github/workflows/sdk-suppressions-test.yaml similarity index 64% rename from .github/workflows/specs-model-test.yaml rename to .github/workflows/sdk-suppressions-test.yaml index e037e01d5e16..c3616dc3713f 100644 --- a/.github/workflows/specs-model-test.yaml +++ b/.github/workflows/sdk-suppressions-test.yaml @@ -1,4 +1,4 @@ -name: Specs Model - Test +name: sdk-suppressions - Test on: push: @@ -10,21 +10,20 @@ on: - package-lock.json - package.json - tsconfig.json + - .github/shared - .github/workflows/_reusable-eng-tools-test.yaml - - .github/workflows/specs-model-test.yaml + - .github/workflows/sdk-suppressions-test.yaml - eng/tools/package.json - eng/tools/tsconfig.json - - eng/tools/specs-model/** + - eng/tools/sdk-suppressions/** workflow_dispatch: permissions: contents: read jobs: - specsModel: - name: Specs Model + sdkSuppressions: + name: sdk-suppressions uses: ./.github/workflows/_reusable-eng-tools-test.yaml with: - package: specs-model - lint: true - prettier: true + package: 
sdk-suppressions diff --git a/.github/workflows/spec-gen-sdk-runner-test.yaml b/.github/workflows/spec-gen-sdk-runner-test.yaml index a53066f95627..cd7b49eb7d03 100644 --- a/.github/workflows/spec-gen-sdk-runner-test.yaml +++ b/.github/workflows/spec-gen-sdk-runner-test.yaml @@ -27,6 +27,3 @@ jobs: with: package: spec-gen-sdk-runner lint: true - sparse-checkout-paths: | - specification/common-types - specification/contosowidgetmanager diff --git a/.github/workflows/spec-gen-sdk-status.yml b/.github/workflows/spec-gen-sdk-status.yml new file mode 100644 index 000000000000..cc409df325e7 --- /dev/null +++ b/.github/workflows/spec-gen-sdk-status.yml @@ -0,0 +1,49 @@ +name: "SDK Validation Status" + +on: + check_run: + types: [completed] + +permissions: + contents: read + checks: read + statuses: write + id-token: write + +jobs: + sdk-validation-status: + if: | + github.event.check_run.check_suite.app.name == 'Azure Pipelines' && + contains(github.event.check_run.name, 'SDK Validation') + name: "SDK Validation Status" + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: | + .github + + # Only run the login and get token steps when the respository is azure-rest-api-specs-pr + - if: github.event.repository.name == 'azure-rest-api-specs-pr' + name: Azure Login with Workload Identity Federation + uses: azure/login@v2 + with: + client-id: "936c56f0-298b-467f-b702-3ad5bf4b15c1" + tenant-id: "72f988bf-86f1-41af-91ab-2d7cd011db47" + allow-no-subscriptions: true + + - if: github.event.repository.name == 'azure-rest-api-specs-pr' + name: Get ADO Token via Managed Identity + run: | + # Get token for Azure DevOps resource + ADO_TOKEN=$(az account get-access-token --resource "499b84ac-1321-427f-aa17-267ca6975798" --query "accessToken" -o tsv) + echo "ADO_TOKEN=$ADO_TOKEN" >> $GITHUB_ENV + + - name: "SDK Validation Set Status" + id: sdk-validation-status + uses: actions/github-script@v7 + with: + script: | + const setStatus = + (await import('${{ 
github.workspace }}/.github/workflows/src/spec-gen-sdk-status.js')).default; + return await setStatus({ github, context, core }); diff --git a/.github/workflows/src/arm-auto-signoff.js b/.github/workflows/src/arm-auto-signoff.js index f32f41720ca0..0f5b280fe7dc 100644 --- a/.github/workflows/src/arm-auto-signoff.js +++ b/.github/workflows/src/arm-auto-signoff.js @@ -1,6 +1,6 @@ // @ts-check -import { setEquals } from "../../src/equality.js"; +import { setEquals } from "../../shared/src/equality.js"; import { extractInputs } from "./context.js"; import { PER_PAGE_MAX } from "./github.js"; import { LabelAction } from "./label.js"; @@ -46,14 +46,7 @@ export default async function getLabelAction({ github, context, core }) { * @param {typeof import("@actions/core")} params.core * @returns {Promise<{labelAction: LabelAction, issueNumber: number}>} */ -export async function getLabelActionImpl({ - owner, - repo, - issue_number, - head_sha, - github, - core, -}) { +export async function getLabelActionImpl({ owner, repo, issue_number, head_sha, github, core }) { const labelActions = { [LabelAction.None]: { labelAction: LabelAction.None, @@ -86,16 +79,13 @@ export async function getLabelActionImpl({ core.info(`Labels: ${labelNames}`); - const workflowRuns = await github.paginate( - github.rest.actions.listWorkflowRunsForRepo, - { - owner, - repo, - event: "pull_request", - head_sha, - per_page: PER_PAGE_MAX, - }, - ); + const workflowRuns = await github.paginate(github.rest.actions.listWorkflowRunsForRepo, { + owner, + repo, + event: "pull_request", + head_sha, + per_page: PER_PAGE_MAX, + }); core.info("Workflow Runs:"); workflowRuns.forEach((wf) => { @@ -106,10 +96,7 @@ export async function getLabelActionImpl({ const incrementalTspRuns = workflowRuns .filter((wf) => wf.name == wfName) // Sort by "updated_at" descending - .sort( - (a, b) => - new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime(), - ); + .sort((a, b) => new Date(b.updated_at).getTime() - new 
Date(a.updated_at).getTime()); if (incrementalTspRuns.length == 0) { core.info( @@ -122,29 +109,22 @@ export async function getLabelActionImpl({ if (run.status == "completed") { if (run.conclusion != "success") { - core.info( - `Run for workflow '${wfName}' did not succeed: '${run.conclusion}'`, - ); + core.info(`Run for workflow '${wfName}' did not succeed: '${run.conclusion}'`); return removeAction; } - const artifacts = await github.paginate( - github.rest.actions.listWorkflowRunArtifacts, - { - owner, - repo, - run_id: run.id, - per_page: PER_PAGE_MAX, - }, - ); + const artifacts = await github.paginate(github.rest.actions.listWorkflowRunArtifacts, { + owner, + repo, + run_id: run.id, + per_page: PER_PAGE_MAX, + }); const artifactNames = artifacts.map((a) => a.name); core.info(`artifactNames: ${JSON.stringify(artifactNames)}`); if (artifactNames.includes("incremental-typespec=false")) { - core.info( - "Spec is not an incremental change to an existing TypeSpec RP", - ); + core.info("Spec is not an incremental change to an existing TypeSpec RP"); return removeAction; } else if (artifactNames.includes("incremental-typespec=true")) { core.info("Spec is an incremental change to an existing TypeSpec RP"); @@ -154,9 +134,7 @@ export async function getLabelActionImpl({ throw `Workflow artifacts did not contain 'incremental-typespec': ${JSON.stringify(artifactNames)}`; } } else { - core.info( - `Workflow '${wfName}' is still in-progress: status='${run.status}'`, - ); + core.info(`Workflow '${wfName}' is still in-progress: status='${run.status}'`); return labelActions[LabelAction.None]; } } @@ -190,9 +168,7 @@ export async function getLabelActionImpl({ const matchingRuns = checkRuns.filter((run) => run.name === checkName); if (matchingRuns.length > 1) { - throw new Error( - `Unexpected number of checks named '${checkName}': ${matchingRuns.length}`, - ); + throw new Error(`Unexpected number of checks named '${checkName}': ${matchingRuns.length}`); } const matchingRun = 
matchingRuns.length === 1 ? matchingRuns[0] : undefined; @@ -201,11 +177,7 @@ export async function getLabelActionImpl({ `${checkName}: Status='${matchingRun?.status}', Conclusion='${matchingRun?.conclusion}'`, ); - if ( - matchingRun && - matchingRun.status === "completed" && - matchingRun.conclusion !== "success" - ) { + if (matchingRun && matchingRun.status === "completed" && matchingRun.conclusion !== "success") { core.info(`Check '${checkName}' did not succeed`); return removeAction; } @@ -216,13 +188,8 @@ export async function getLabelActionImpl({ } if ( - setEquals( - new Set(requiredCheckRuns.map((run) => run.name)), - new Set(requiredCheckNames), - ) && - requiredCheckRuns.every( - (run) => run.status === "completed" && run.conclusion === "success", - ) + setEquals(new Set(requiredCheckRuns.map((run) => run.name)), new Set(requiredCheckNames)) && + requiredCheckRuns.every((run) => run.status === "completed" && run.conclusion === "success") ) { core.info("All requirements met for auto-signoff"); return labelActions[LabelAction.Add]; diff --git a/.github/workflows/src/arm-incremental-typespec.js b/.github/workflows/src/arm-incremental-typespec.js index e9bcfbe97bf1..dcec173dd779 100644 --- a/.github/workflows/src/arm-incremental-typespec.js +++ b/.github/workflows/src/arm-incremental-typespec.js @@ -1,18 +1,22 @@ // @ts-check // For now, treat all paths as posix, since this is the format returned from git commands -import { dirname, join } from "path/posix"; +import debug from "debug"; +import { dirname, join, relative, resolve } from "path"; +import { simpleGit } from "simple-git"; import { example, getChangedFiles, readme, resourceManager, swagger, -} from "../../src/changed-files.js"; -import { lsTree, show } from "../../src/git.js"; -import { getInputFiles } from "../../src/readme.js"; +} from "../../shared/src/changed-files.js"; +import { Readme } from "../../shared/src/readme.js"; import { CoreLogger } from "./core-logger.js"; +// Enable simple-git 
debug logging to improve console output +debug.enable("simple-git"); + /** * @param {import('github-script').AsyncFunctionArguments} AsyncFunctionArguments * @returns {Promise} @@ -33,12 +37,14 @@ export default async function incrementalTypeSpec({ core }) { return false; } + const git = simpleGit(options.cwd); + // If any changed swagger file is not typespec-generated, return false for (const file of changedRmFiles.filter(swagger)) { /** @type string */ let swaggerText; try { - swaggerText = await show("HEAD", file, options); + swaggerText = await git.show([`HEAD:${file}`]); } catch (e) { if (e instanceof Error && e.message.includes("does not exist")) { // To simplify logic, if PR deletes a swagger file, it's not "incremental typespec" @@ -73,7 +79,7 @@ export default async function incrementalTypeSpec({ core }) { let readmeText; try { - readmeText = await show("HEAD", readmeFile, options); + readmeText = await git.show([`HEAD:${readmeFile}`]); } catch (e) { if (e instanceof Error && e.message.includes("does not exist")) { // To simplify logic, if PR deletes a readme file, it's not "incremental typespec" @@ -86,7 +92,14 @@ export default async function incrementalTypeSpec({ core }) { } // If a readme is changed, to be conservative, handle as if every input file in the readme were changed - const inputFiles = await getInputFiles(readmeText, options); + const readme = new Readme(resolve(options.cwd ?? 
"", readmeFile), { + content: readmeText, + logger: options.logger, + }); + const tags = await readme.getTags(); + const inputFiles = [...tags.values()].flatMap((t) => + [...t.inputFiles.keys()].map((p) => relative(dirname(readme.path), p)), + ); inputFiles.forEach((f) => { changedReadmeInputFiles.add(join(dirname(readmeFile), f)); @@ -95,9 +108,7 @@ export default async function incrementalTypeSpec({ core }) { const changedSpecDirs = new Set([ ...changedRmFiles.filter(swagger).map((f) => dirname(dirname(dirname(f)))), - ...changedRmFiles - .filter(example) - .map((f) => dirname(dirname(dirname(dirname(f))))), + ...changedRmFiles.filter(example).map((f) => dirname(dirname(dirname(dirname(f))))), // Readme input files should use the same path format as changed swagger files ...[...changedReadmeInputFiles].map((f) => dirname(dirname(dirname(f)))), ]); @@ -109,10 +120,13 @@ export default async function incrementalTypeSpec({ core }) { // Ensure that each changed spec dir contained at least one typespec-generated swagger in the base commitish for (const changedSpecDir of changedSpecDirs) { - const specFilesBaseBranch = await lsTree("HEAD^", changedSpecDir, { - args: "-r --name-only", - ...options, - }); + const specFilesBaseBranch = await git.raw([ + "ls-tree", + "-r", + "--name-only", + "HEAD^", + changedSpecDir, + ]); // Filter files to only include RM swagger files const specRmSwaggerFilesBaseBranch = specFilesBaseBranch @@ -129,7 +143,7 @@ export default async function incrementalTypeSpec({ core }) { let containsTypeSpecGeneratedSwagger = false; // TODO: Add lint rule to prevent using "for...in" instead of "for...of" for (const file of specRmSwaggerFilesBaseBranch) { - const baseSwagger = await show("HEAD^", file, options); + const baseSwagger = await git.show([`HEAD^:${file}`]); const baseSwaggerObj = JSON.parse(baseSwagger); if (baseSwaggerObj["info"]?.["x-typespec-generated"]) { core.info( @@ -148,8 +162,6 @@ export default async function incrementalTypeSpec({ 
core }) { } } - core.info( - "Appears to contain only incremental changes to existing TypeSpec RP(s)", - ); + core.info("Appears to contain only incremental changes to existing TypeSpec RP(s)"); return true; } diff --git a/.github/workflows/src/artifacts.js b/.github/workflows/src/artifacts.js new file mode 100644 index 000000000000..035eb210f601 --- /dev/null +++ b/.github/workflows/src/artifacts.js @@ -0,0 +1,199 @@ +// @ts-check +import { fetchWithRetry } from "./retries.js"; + +/** + * @typedef {Object} ArtifactResource + * @property {string} [downloadUrl] + */ + +/** + * @typedef {Object} ArtifactValue + * @property {string} name - The name of the artifact + * @property {string} [id] - The ID of the artifact + * @property {ArtifactResource} [resource] - The resource containing download information + */ + +/** + * @typedef {Object} Artifacts + * @property {ArtifactResource} [resource] - For single artifact responses + */ + +/** + * @typedef {Object} ListArtifactsResponse + * @property {Array} value + */ + +/** + * @param {Object} params + * @param {string} params.ado_build_id + * @param {string} params.ado_project_url + * @param {string} params.artifactName + * @param {string} params.artifactFileName + * @param {typeof import("@actions/core")} params.core + * @param {import('./retries.js').RetryOptions} [params.retryOptions] + * @param {boolean} [params.fallbackToFailedArtifact] + * @param {string} [params.token] + * @returns {Promise<{artifactData: string}>} + */ +export async function getAzurePipelineArtifact({ + ado_build_id, + ado_project_url, + artifactName, + artifactFileName, + core, + retryOptions = {}, + fallbackToFailedArtifact = false, + token = undefined, +}) { + let apiUrl = `${ado_project_url}/_apis/build/builds/${ado_build_id}/artifacts?artifactName=${artifactName}&api-version=7.0`; + core.info(`Calling Azure DevOps API to get the artifact: ${apiUrl}`); + + const headers = { + "Content-Type": "application/json", + ...(token && { Authorization: 
`Bearer ${token}` }), + }; + let artifactData = ""; + // Use Node.js fetch with retry to call the API + let response = await fetchWithRetry( + apiUrl, + { + method: "GET", + headers, + }, + retryOptions, + ); + + // If the response is 404, check if we should fallback to the failed artifact + if (response.status === 404) { + if (!fallbackToFailedArtifact) { + core.info(`Artifact '${artifactName}' not found (404)`); + return { artifactData }; + } else { + response = await fetchFailedArtifact({ + ado_build_id, + ado_project_url, + artifactName, + core, + retryOptions, + headers, + }); + } + } + + if (response.ok) { + // Step 1: Get the download URL for the artifact + /** @type {Artifacts} */ + const artifacts = /** @type {Artifacts} */ (await response.json()); + core.info(`Artifacts found: ${JSON.stringify(artifacts)}`); + if (!artifacts.resource || !artifacts.resource.downloadUrl) { + throw new Error(`Download URL not found for the artifact ${artifactName}`); + } + + let downloadUrl = artifacts.resource.downloadUrl; + const index = downloadUrl.indexOf("?format=zip"); + if (index !== -1) { + // Keep everything up to (but not including) "?format=zip" + downloadUrl = downloadUrl.substring(0, index); + } + downloadUrl += `?format=file&subPath=/${artifactFileName}`; + core.info(`Downloading artifact from: ${downloadUrl}`); + + // Step 2: Fetch Artifact Content (as a Buffer) with retry + const artifactResponse = await fetchWithRetry( + downloadUrl, + { + method: "GET", + headers, + }, + retryOptions, + ); + if (!artifactResponse.ok) { + throw new Error(`Failed to fetch artifact: ${artifactResponse.statusText}`); + } + + artifactData = await artifactResponse.text(); + } else { + core.error(`Failed to fetch artifacts: ${response.status}, ${response.statusText}`); + const errorText = await response.text(); + core.error(`Error details: ${errorText}`); + } + return { artifactData }; +} + +/** + * Extracts the ADO build ID and project URL from the given build URL. 
+ * @param {string} buildUrl + * @returns {{projectUrl: string, buildId: string}} + * @throws {Error} If the build URL does not match the expected format. + */ +export function getAdoBuildInfoFromUrl(buildUrl) { + // Extract the ADO build ID and project URL from the check run details URL + const buildUrlRegex = /^(.*?)(?=\/_build\/).*?[?&]buildId=(\d+)/; + const match = buildUrl.match(buildUrlRegex); + if (!match) { + throw new Error(`Could not extract build ID or project URL from the URL: ${buildUrl}`); + } + return { projectUrl: match[1], buildId: match[2] }; +} + +/** + * @param {Object} params + * @param {string} params.ado_build_id + * @param {string} params.ado_project_url + * @param {string} params.artifactName + * @param {typeof import("@actions/core")} params.core + * @param {import('./retries.js').RetryOptions} [params.retryOptions] + * @param {Object} [params.headers] + * @returns {Promise} + */ +export async function fetchFailedArtifact({ + ado_build_id, + ado_project_url, + artifactName, + core, + retryOptions = {}, + headers, +}) { + // fallback to fetch the failed artifact + let apiUrl = `${ado_project_url}/_apis/build/builds/${ado_build_id}/artifacts?api-version=7.0`; + core.info(`List the artifacts: ${apiUrl}`); + let response = await fetchWithRetry( + apiUrl, + { + method: "GET", + headers, + }, + retryOptions, + ); + if (!response.ok) { + throw new Error(`Failed to fetch artifacts: ${response.status}, ${response.statusText}`); + } + /** @type {ListArtifactsResponse} */ + const listArtifactResponse = /** @type {ListArtifactsResponse} */ (await response.json()); + core.info(`Artifacts found: ${JSON.stringify(listArtifactResponse)}`); + // Use filter to get matching artifacts and sort them in descending alphabetical order + const artifactsList = listArtifactResponse.value + .filter((artifact) => artifact.name.includes(artifactName)) + .sort((a, b) => b.name.localeCompare(a.name)); // Descending order (Z to A) + if (artifactsList.length === 0) { + 
const message = `No artifacts found with name containing ${artifactName}`; + core.warning(message); + // Return a Response-like object using the global Response constructor + return new Response(message, { + status: 404, + statusText: message, + headers: { "Content-Type": "text/plain" }, + }); + } + artifactName = artifactsList[0].name; + apiUrl = `${ado_project_url}/_apis/build/builds/${ado_build_id}/artifacts?artifactName=${artifactName}&api-version=7.0`; + core.info(`Fetching the failed artifact: ${apiUrl}`); + return await fetchWithRetry( + apiUrl, + { + method: "GET", + headers, + }, + retryOptions, + ); +} diff --git a/.github/workflows/src/context.js b/.github/workflows/src/context.js index 888e60808303..5948aab82335 100644 --- a/.github/workflows/src/context.js +++ b/.github/workflows/src/context.js @@ -3,6 +3,11 @@ import { PER_PAGE_MAX } from "./github.js"; import { getIssueNumber } from "./issues.js"; +/** + * @typedef {import('@octokit/plugin-rest-endpoint-methods').RestEndpointMethodTypes} RestEndpointMethodTypes + * @typedef {RestEndpointMethodTypes["repos"]["listPullRequestsAssociatedWithCommit"]["response"]["data"][number]} PullRequest + */ + /** * Extracts inputs from context based on event name and properties. * run_id is only defined for "workflow_run:completed" events. 
@@ -10,21 +15,19 @@ import { getIssueNumber } from "./issues.js"; * @param {import('github-script').AsyncFunctionArguments['github']} github * @param {import('github-script').AsyncFunctionArguments['context']} context * @param {import('github-script').AsyncFunctionArguments['core']} core - * @returns {Promise<{owner: string, repo: string, head_sha: string, issue_number: number, run_id: number, ado_project_url?: string, ado_build_id?: string }>} + * @returns {Promise<{owner: string, repo: string, head_sha: string, issue_number: number, run_id: number, details_url?: string }>} */ export async function extractInputs(github, context, core) { core.info("extractInputs()"); core.info(` eventName: ${context.eventName}`); core.info(` payload.action: ${context.eventName}`); - core.info( - ` payload.workflow_run.event: ${context.payload.workflow_run?.event || "undefined"}`, - ); + core.info(` payload.workflow_run.event: ${context.payload.workflow_run?.event || "undefined"}`); // Log full context when debug is enabled. Most workflows should be idempotent and can be re-run // with debug enabled to replay the previous context. 
core.isDebug() && core.debug(`context: ${JSON.stringify(context)}`); - /** @type {{ owner: string, repo: string, head_sha: string, issue_number: number, run_id: number, ado_project_url?: string, ado_build_id?: string }} */ + /** @type {{ owner: string, repo: string, head_sha: string, issue_number: number, run_id: number, details_url?: string }} */ let inputs; // Add support for more event types as needed @@ -32,15 +35,17 @@ export async function extractInputs(github, context, core) { context.eventName === "pull_request" || (context.eventName === "pull_request_target" && // "pull_request_target" is particularly dangerous, so only support actions as needed - (context.payload.action === "labeled" || + (context.payload.action === "opened" || + context.payload.action === "synchronize" || + context.payload.action === "reopened" || + context.payload.action === "labeled" || context.payload.action === "unlabeled")) ) { // Most properties on payload should be the same for both pull_request and pull_request_target - const payload = - /** @type {import("@octokit/webhooks-types").PullRequestEvent} */ ( - context.payload - ); + const payload = /** @type {import("@octokit/webhooks-types").PullRequestEvent} */ ( + context.payload + ); inputs = { owner: payload.repository.owner.login, @@ -49,14 +54,10 @@ export async function extractInputs(github, context, core) { issue_number: payload.pull_request.number, run_id: NaN, }; - } else if ( - context.eventName === "issue_comment" && - context.payload.action === "edited" - ) { - const payload = - /** @type {import("@octokit/webhooks-types").IssueCommentEditedEvent} */ ( - context.payload - ); + } else if (context.eventName === "issue_comment" && context.payload.action === "edited") { + const payload = /** @type {import("@octokit/webhooks-types").IssueCommentEditedEvent} */ ( + context.payload + ); const owner = payload.repository.owner.login; const repo = payload.repository.name; @@ -76,10 +77,9 @@ export async function 
extractInputs(github, context, core) { run_id: NaN, }; } else if (context.eventName === "workflow_dispatch") { - const payload = - /** @type {import("@octokit/webhooks-types").WorkflowDispatchEvent} */ ( - context.payload - ); + const payload = /** @type {import("@octokit/webhooks-types").WorkflowDispatchEvent} */ ( + context.payload + ); inputs = { owner: payload.repository.owner.login, repo: payload.repository.name, @@ -87,14 +87,10 @@ export async function extractInputs(github, context, core) { issue_number: NaN, run_id: NaN, }; - } else if ( - context.eventName === "workflow_run" && - context.payload.action === "completed" - ) { - const payload = - /** @type {import("@octokit/webhooks-types").WorkflowRunCompletedEvent} */ ( - context.payload - ); + } else if (context.eventName === "workflow_run" && context.payload.action === "completed") { + const payload = /** @type {import("@octokit/webhooks-types").WorkflowRunCompletedEvent} */ ( + context.payload + ); let issue_number = NaN; @@ -127,18 +123,34 @@ export async function extractInputs(github, context, core) { const head_repo = payload.workflow_run.head_repository.name; const head_sha = payload.workflow_run.head_sha; - core.info( - `listPullRequestsAssociatedWithCommit(${head_owner}, ${head_repo}, ${head_sha})`, - ); - const pullRequests = await github.paginate( - github.rest.repos.listPullRequestsAssociatedWithCommit, - { - owner: head_owner, - repo: head_repo, - commit_sha: head_sha, - per_page: PER_PAGE_MAX, - }, - ); + /** @type {PullRequest[]} */ + let pullRequests = []; + + try { + core.info( + `listPullRequestsAssociatedWithCommit(${head_owner}, ${head_repo}, ${head_sha})`, + ); + pullRequests = ( + await github.paginate(github.rest.repos.listPullRequestsAssociatedWithCommit, { + owner: head_owner, + repo: head_repo, + commit_sha: head_sha, + per_page: PER_PAGE_MAX, + }) + ).filter( + // Only include PRs to the same repo as the triggering workflow. 
+ // + // Other unique keys like "full_name" should also work, but "id" is the safest since it's + // supposed to be guaranteed unique and never change (repos can be renamed or change owners). + (pr) => pr.base.repo.id === payload.workflow_run.repository.id, + ); + } catch (error) { + // Short message always + core.info(`Error: ${error instanceof Error ? error.message : "unknown"}`); + + // Long message only in debug + core.debug(`Error: ${error}`); + } if (pullRequests.length === 0) { // There are three cases where the "commits" REST API called above can return @@ -151,8 +163,7 @@ export async function extractInputs(github, context, core) { // // In any case, the solution is to fall back to the (lower-rate-limit) search API. // The search API is confirmed to work in case #1, but has not been tested in #2 or #3. - issue_number = (await getIssueNumber({ head_sha, github, core })) - .issueNumber; + issue_number = (await getIssueNumber({ head_sha, github, core })).issueNumber; } else if (pullRequests.length === 1) { issue_number = pullRequests[0].number; } else { @@ -173,15 +184,12 @@ export async function extractInputs(github, context, core) { ) { // Attempt to extract issue number from artifact. This can be trusted, because it was uploaded from a workflow that is trusted, // because "issue_comment" and "workflow_run" only trigger on workflows in the default branch. 
- const artifacts = await github.paginate( - github.rest.actions.listWorkflowRunArtifacts, - { - owner: payload.workflow_run.repository.owner.login, - repo: payload.workflow_run.repository.name, - run_id: payload.workflow_run.id, - per_page: PER_PAGE_MAX, - }, - ); + const artifacts = await github.paginate(github.rest.actions.listWorkflowRunArtifacts, { + owner: payload.workflow_run.repository.owner.login, + repo: payload.workflow_run.repository.name, + run_id: payload.workflow_run.id, + per_page: PER_PAGE_MAX, + }); const artifactNames = artifacts.map((a) => a.name); @@ -202,9 +210,7 @@ export async function extractInputs(github, context, core) { issue_number = parsedValue; continue; } else { - throw new Error( - `Invalid issue-number: '${value}' parsed to '${parsedValue}'`, - ); + throw new Error(`Invalid issue-number: '${value}' parsed to '${parsedValue}'`); } } } @@ -229,31 +235,31 @@ export async function extractInputs(github, context, core) { }; } else if (context.eventName === "check_run") { let checkRun = context.payload.check_run; - - // Extract the ADO build ID and project URL from the check run details URL - const buildUrlRegex = /^(.*?)(?=\/_build\/).*?[?&]buildId=(\d+)/; - const match = checkRun.details_url.match(buildUrlRegex); - if (!match) { - throw new Error( - `Could not extract build ID or project URL from check run details URL: ${checkRun.details_url}`, - ); - } - if ( - !context.payload.repository || - !context.payload.repository.owner || - !context.payload.repository.owner.login || - !context.payload.repository.name - ) { - throw new Error( - `Could not extract repository owner or name from context payload: ${JSON.stringify(context.payload.repository)}`, - ); - } + const payload = /** @type {import("@octokit/webhooks-types").CheckRunEvent} */ ( + context.payload + ); + const repositoryInfo = getRepositoryInfo(payload.repository); inputs = { - owner: context.payload.repository.owner.login, - repo: context.payload.repository.name, + owner: 
repositoryInfo.owner, + repo: repositoryInfo.repo, head_sha: checkRun.head_sha, - ado_build_id: match[2], - ado_project_url: match[1], + details_url: checkRun.details_url, + issue_number: NaN, + run_id: NaN, + }; + } else if (context.eventName === "check_suite" && context.payload.action === "completed") { + const payload = /** @type {import("@octokit/webhooks-types").CheckSuiteCompletedEvent} */ ( + context.payload + ); + + const repositoryInfo = getRepositoryInfo(payload.repository); + inputs = { + owner: repositoryInfo.owner, + repo: repositoryInfo.repo, + head_sha: payload.check_suite.head_sha, + + // These are NaN today because the only consumer of this event needs only + // the head_sha issue_number: NaN, run_id: NaN, }; @@ -266,3 +272,20 @@ export async function extractInputs(github, context, core) { core.info(`inputs: ${JSON.stringify(inputs)}`); return inputs; } + +/** + * @param {import("@octokit/webhooks-types").Repository | undefined} repository + * @returns {{ owner: string, repo: string }} + */ +function getRepositoryInfo(repository) { + if (!repository || !repository.owner || !repository.owner.login || !repository.name) { + throw new Error( + `Could not extract repository owner or name from context payload: ${JSON.stringify(repository)}`, + ); + } + + return { + owner: repository.owner.login, + repo: repository.name, + }; +} diff --git a/.github/workflows/src/core-logger.js b/.github/workflows/src/core-logger.js index 66cb8786dea2..a0c6f7d03212 100644 --- a/.github/workflows/src/core-logger.js +++ b/.github/workflows/src/core-logger.js @@ -1,7 +1,7 @@ // @ts-check /** - * @typedef {import('../../src/types.js').ILogger} ILogger + * @typedef {import('../../shared/src/logger.js').ILogger} ILogger */ /** @@ -25,6 +25,13 @@ export class CoreLogger { this.#core.debug(message); } + /** + * @param {string} message + */ + error(message) { + this.#core.error(message); + } + /** * @param {string} message */ diff --git a/.github/workflows/src/github-test.js 
b/.github/workflows/src/github-test.js index 06bea6d1bcab..a92da7684039 100644 --- a/.github/workflows/src/github-test.js +++ b/.github/workflows/src/github-test.js @@ -18,10 +18,7 @@ export default async function importAllModules({ core }) { // find all files matching "**/src/**/*.js", sorted for readability const scriptFiles = (await readdir(githubDir, { recursive: true })) - .filter( - (f) => - normalize(f).split(sep).includes("src") && basename(f).endsWith(".js"), - ) + .filter((f) => normalize(f).split(sep).includes("src") && basename(f).endsWith(".js")) .sort(); core.info("Script Files:"); diff --git a/.github/workflows/src/github.js b/.github/workflows/src/github.js index 6b3420832458..b02dea5b1f55 100644 --- a/.github/workflows/src/github.js +++ b/.github/workflows/src/github.js @@ -1,3 +1,222 @@ // @ts-check +import { byDate, invert } from "../../shared/src/sort.js"; + +/** + * @typedef {import('@octokit/plugin-rest-endpoint-methods').RestEndpointMethodTypes} RestEndpointMethodTypes + * @typedef {RestEndpointMethodTypes["checks"]["listForRef"]["response"]["data"]["check_runs"]} CheckRuns + * @typedef {RestEndpointMethodTypes["actions"]["listWorkflowRunsForRepo"]["response"]["data"]["workflow_runs"]} WorkflowRuns + * @typedef {RestEndpointMethodTypes["repos"]["listCommitStatusesForRef"]["response"]["data"]} CommitStatuses + */ + export const PER_PAGE_MAX = 100; + +/** + * https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks#check-statuses-and-conclusions + */ +export const CheckStatus = { + /** + * @type {"completed"} + * @description The check run completed and has a conclusion. + */ + COMPLETED: "completed", + /** + * @type {"expected"} + * @description The check run is waiting for a status to be reported. + */ + EXPECTED: "expected", + /** + * @type {"failure"} + * @description The check run failed. 
+ */ + FAILURE: "failure", + /** + * @type {"in_progress"} + * @description The check run is in progress. + */ + IN_PROGRESS: "in_progress", + /** + * @type {"pending"} + * @description The check run is at the front of the queue but the group-based concurrency limit has been reached. + */ + PENDING: "pending", + /** + * @type {"queued"} + * @description The check run has been queued. + */ + QUEUED: "queued", + /** + * @type {"requested"} + * @description The check run has been created but has not been queued. + */ + REQUESTED: "requested", + /** + * @type {"startup_failure"} + * @description The check suite failed during startup. This status is not applicable to check runs. + */ + STARTUP_FAILURE: "startup_failure", + /** + * @type {"waiting"} + * @description The check run is waiting for a deployment protection rule to be satisfied. + */ + WAITING: "waiting", +}; + +/** + * https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks#check-statuses-and-conclusions + */ +export const CheckConclusion = { + /** + * @type {"action_required"} + * @description The check run provided required actions upon its completion. For more information, see Using the REST API to interact with checks. + */ + ACTION_REQUIRED: "action_required", + /** + * @type {"cancelled"} + * @description The check run was cancelled before it completed. + */ + CANCELLED: "cancelled", + /** + * @type {"failure"} + * @description The check run failed. + */ + FAILURE: "failure", + /** + * @type {"neutral"} + * @description The check run completed with a neutral result. This is treated as a success for dependent checks in GitHub Actions. + */ + NEUTRAL: "neutral", + /** + * @type {"skipped"} + * @description The check run was skipped. This is treated as a success for dependent checks in GitHub Actions. 
+ */ + SKIPPED: "skipped", + /** + * @type {"stale"} + * @description The check run was marked stale by GitHub because it took too long. + */ + STALE: "stale", + /** + * @type {"success"} + * @description The check run completed successfully. + */ + SUCCESS: "success", + /** + * @type {"timed_out"} + * @description The check run timed out. + */ + TIMED_OUT: "timed_out", +}; + +/** + * https://docs.github.com/en/rest/commits/statuses?apiVersion=2022-11-28#create-a-commit-status--parameters + */ +export const CommitStatusState = { + /** + * @type {"error"} + */ + ERROR: "error", + /** + * @type {"failure"} + */ + FAILURE: "failure", + /** + * @type {"pending"} + */ + PENDING: "pending", + /** + * @type {"success"} + */ + SUCCESS: "success", +}; + +/** + * Writes content to the GitHub Actions summary + * @param {string} content - Markdown content to add to the summary + * @param {typeof import("@actions/core")} core - GitHub Actions core library + */ +export async function writeToActionsSummary(content, core) { + try { + await core.summary.addRaw(content).write(); + core.info("Successfully wrote to the GitHub Actions summary"); + } catch (error) { + throw new Error(`Failed to write to the GitHub Actions summary: ${error}`); + } +} + +/** + * Returns the check with the given checkRunName for the given ref. 
+ * @param {import('github-script').AsyncFunctionArguments['github']} github + * @param {import('github-script').AsyncFunctionArguments['context']} context + * @param {string} checkRunName + * @param {string} ref + * @returns {Promise} + */ +export async function getCheckRuns(github, context, checkRunName, ref) { + const result = await github.paginate(github.rest.checks.listForRef, { + ...context.repo, + ref: ref, + check_name: checkRunName, + status: "completed", + per_page: PER_PAGE_MAX, + }); + + /* v8 ignore next */ + return result.sort( + invert( + byDate((run) => { + if (run.completed_at === null) { + // completed_at should never be null because status is "completed" + throw new Error(`Unexpected value of run.completed_at: '${run.completed_at}'`); + } else { + return run.completed_at; + } + }), + ), + ); +} + +/** + * Returns the check with the given checkRunName for the given ref. + * @param {import('github-script').AsyncFunctionArguments['github']} github + * @param {import('github-script').AsyncFunctionArguments['context']} context + * @param {string} commitStatusName + * @param {string} ref + * @returns {Promise} + */ +export async function getCommitStatuses(github, context, commitStatusName, ref) { + const result = await github.paginate(github.rest.repos.listCommitStatusesForRef, { + ...context.repo, + ref: ref, + per_page: PER_PAGE_MAX, + }); + + return result + .filter( + (status) => + // Property "context" is case-insensitive + status.context.toLowerCase() === commitStatusName.toLowerCase(), + ) + .sort(invert(byDate((status) => status.updated_at))); +} + +/** + * Returns the workflow run with the given workflowName for the given ref. 
+ * @param {import('github-script').AsyncFunctionArguments['github']} github + * @param {import('github-script').AsyncFunctionArguments['context']} context + * @param {string} workflowName + * @param {string} ref + * @returns {Promise} + */ +export async function getWorkflowRuns(github, context, workflowName, ref) { + const result = await github.paginate(github.rest.actions.listWorkflowRunsForRepo, { + ...context.repo, + head_sha: ref, + status: "completed", + per_page: PER_PAGE_MAX, + }); + + return result + .filter((run) => run.name === workflowName) + .sort(invert(byDate((run) => run.updated_at))); +} diff --git a/.github/workflows/src/issues.js b/.github/workflows/src/issues.js index 21e632798be0..98ae32234616 100644 --- a/.github/workflows/src/issues.js +++ b/.github/workflows/src/issues.js @@ -1,3 +1,5 @@ +// @ts-check + /** * Retrieves the PR number associated with a specific commit SHA * @param {Object} params @@ -23,16 +25,12 @@ export async function getIssueNumber({ head_sha, core, github }) { const totalCount = searchResponse.data.total_count; const itemsCount = searchResponse.data.items.length; - core.info( - `Search results: ${totalCount} total matches, ${itemsCount} items returned`, - ); + core.info(`Search results: ${totalCount} total matches, ${itemsCount} items returned`); if (itemsCount > 0) { const firstItem = searchResponse.data.items[0]; issueNumber = firstItem.number; - core.info( - `Found the first matched PR #${issueNumber}: ${firstItem.html_url}`, - ); + core.info(`Found the first matched PR #${issueNumber}: ${firstItem.html_url}`); if (itemsCount > 1) { core.warning( diff --git a/.github/workflows/src/label.js b/.github/workflows/src/label.js index 81d33237be19..03d879e00594 100644 --- a/.github/workflows/src/label.js +++ b/.github/workflows/src/label.js @@ -15,3 +15,10 @@ export const LabelAction = Object.freeze({ Remove: "remove", }); // @ts-check + +export const Label = { + /** + * @type {"Approved-Avocado"} + */ + APPROVED_AVOCADO: 
"Approved-Avocado", +}; diff --git a/.github/workflows/src/retries.js b/.github/workflows/src/retries.js new file mode 100644 index 000000000000..9fc20a2988df --- /dev/null +++ b/.github/workflows/src/retries.js @@ -0,0 +1,56 @@ +// @ts-check + +/** + * @typedef {Object} RetryOptions + * @property {number} [maxRetries] Default: 3 + * @property {number} [initialDelayMs] Default: 1000 + * @property {number} [maxDelayMs] - Default: 10000 + * @property {Function} [logger] - Default: console.log + */ + +/** + * Retry a function with exponential backoff + * @param {Function} fn - Function to retry + * @param {RetryOptions} [options] - Retry options + * @returns {Promise} - Result of the function + */ +export async function retry(fn, options = {}) { + const { + maxRetries = 3, + initialDelayMs = 1000, + maxDelayMs = 10000, + logger = console.log, + } = options; + + let lastError; + + for (let attempt = 0; attempt < maxRetries + 1; attempt++) { + try { + return await fn(); + } catch (error) { + lastError = error; + + if (attempt < maxRetries) { + const delayMs = Math.min(initialDelayMs * Math.pow(2, attempt), maxDelayMs); + logger(`Request failed, retrying in ${delayMs}ms... 
(${attempt + 1}/${maxRetries})`); + if (error instanceof Error) { + logger(`Error: ${error.message}`); + } + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + } + + throw lastError; +} + +/** + * Fetch with retry functionality + * @param {string} url - URL to fetch + * @param {Object} [options] - Fetch options + * @param {RetryOptions} [retryOptions] - Retry options + * @returns {Promise} - Fetch response + */ +export async function fetchWithRetry(url, options = {}, retryOptions = {}) { + return retry(() => fetch(url, options), retryOptions); +} diff --git a/.github/workflows/src/sdk-breaking-change-labels.js b/.github/workflows/src/sdk-breaking-change-labels.js index 4b210ed9ec23..6ca7801cb78d 100644 --- a/.github/workflows/src/sdk-breaking-change-labels.js +++ b/.github/workflows/src/sdk-breaking-change-labels.js @@ -1,8 +1,12 @@ // @ts-check -import { sdkLabels } from "../../src/sdk-types.js"; -import { LabelAction } from "./label.js"; +import { sdkLabels } from "../../shared/src/sdk-types.js"; +import { getAdoBuildInfoFromUrl, getAzurePipelineArtifact } from "./artifacts.js"; import { extractInputs } from "./context.js"; -import { getIssueNumber } from "./issues.js"; +import { LabelAction } from "./label.js"; + +/** + * @typedef {import("../../shared/src/sdk-types.js").SdkName} SdkName + */ /** * @typedef {Object} ArtifactResource @@ -16,98 +20,69 @@ import { getIssueNumber } from "./issues.js"; /** * @param {import('github-script').AsyncFunctionArguments} AsyncFunctionArguments - * @returns {Promise<{labelName: string, labelAction: LabelAction, issueNumber: number}>} + * @returns {Promise<{labelName: string | undefined, labelAction: LabelAction, issueNumber: number}>} */ export async function getLabelAndAction({ github, context, core }) { const inputs = await extractInputs(github, context, core); - const ado_build_id = inputs.ado_build_id; - const ado_project_url = inputs.ado_project_url; - const head_sha = inputs.head_sha; - if 
(!ado_build_id || !ado_project_url || !head_sha) { - throw new Error( - `Required inputs are not valid: ado_build_id:${ado_build_id}, ado_project_url:${ado_project_url}, head_sha:${head_sha}`, - ); + const details_url = inputs.details_url; + if (!details_url) { + throw new Error(`Required inputs are not valid: details_url:${details_url}`); } return await getLabelAndActionImpl({ - ado_build_id, - ado_project_url, - head_sha, + details_url, core, - github, }); } /** * @param {Object} params - * @param {string} params.ado_build_id - * @param {string} params.ado_project_url - * @param {string} params.head_sha - * @param {(import("@octokit/core").Octokit & import("@octokit/plugin-rest-endpoint-methods/dist-types/types.js").Api)} params.github + * @param {string} params.details_url * @param {typeof import("@actions/core")} params.core - * @returns {Promise<{labelName: string, labelAction: LabelAction, issueNumber: number}>} + * @param {import('./retries.js').RetryOptions} [params.retryOptions] + * @returns {Promise<{labelName: string | undefined, labelAction: LabelAction, issueNumber: number}>} */ -export async function getLabelAndActionImpl({ - ado_build_id, - ado_project_url, - head_sha, - core, - github, -}) { +export async function getLabelAndActionImpl({ details_url, core, retryOptions = {} }) { + // Override default logger from console.log to core.info + retryOptions = { logger: core.info, ...retryOptions }; + let issue_number = NaN; let labelAction; + /** @type {String | undefined} */ let labelName = ""; - const artifactName = "spec-gen-sdk-breaking-change-artifact"; + const buildInfo = getAdoBuildInfoFromUrl(details_url); + const ado_project_url = buildInfo.projectUrl; + const ado_build_id = buildInfo.buildId; + const artifactName = "spec-gen-sdk-artifact"; const artifactFileName = artifactName + ".json"; - const apiUrl = `${ado_project_url}/_apis/build/builds/${ado_build_id}/artifacts?artifactName=${artifactName}&api-version=7.0`; - core.info(`Calling Azure 
DevOps API to get the artifact: ${apiUrl}`); - - // Use Node.js fetch to call the API - const response = await fetch(apiUrl, { - method: "GET", - headers: { - "Content-Type": "application/json", - }, + const result = await getAzurePipelineArtifact({ + ado_build_id, + ado_project_url, + artifactName, + artifactFileName, + core, + retryOptions, + fallbackToFailedArtifact: true, + token: process.env.ADO_TOKEN, }); - - if (response.status === 404) { - core.info( - `Artifact '${artifactName}' not found (404). This might be expected if there are no breaking changes.`, + // Parse the JSON data + if (!result.artifactData) { + core.warning( + `Artifact '${artifactName}' not found in the build with details_url:${details_url} or failed to download it.`, ); - } else if (response.ok) { - // Step 1: Get the download URL for the artifact - /** @type {Artifacts} */ - const artifacts = /** @type {Artifacts} */ (await response.json()); - core.info(`Artifacts found: ${JSON.stringify(artifacts)}`); - if (!artifacts.resource || !artifacts.resource.downloadUrl) { - throw new Error( - `Download URL not found for the artifact ${artifactName}`, - ); - } - - let downloadUrl = artifacts.resource.downloadUrl; - const index = downloadUrl.indexOf("?format=zip"); - if (index !== -1) { - // Keep everything up to (but not including) "?format=zip" - downloadUrl = downloadUrl.substring(0, index); - } - downloadUrl += `?format=file&subPath=/${artifactFileName}`; - core.info(`Downloading artifact from: ${downloadUrl}`); - - // Step 2: Fetch Artifact Content (as a Buffer) - const artifactResponse = await fetch(downloadUrl); - if (!artifactResponse.ok) { - throw new Error( - `Failed to fetch artifact: ${artifactResponse.statusText}`, + } else { + core.info(`Artifact content: ${result.artifactData}`); + // Parse the JSON data + const specGenSdkArtifactInfo = JSON.parse(result.artifactData); + const labelActionText = specGenSdkArtifactInfo.labelAction; + issue_number = 
parseInt(specGenSdkArtifactInfo.prNumber, 10); + if (!issue_number) { + core.warning( + `No PR number found in the artifact '${artifactName}' with details_url:${details_url}.`, ); } - const artifactData = await artifactResponse.text(); - core.info(`Artifact content: ${artifactData}`); - - // Parse the JSON data - const breakingChangeResult = JSON.parse(artifactData); - const labelActionText = breakingChangeResult.labelAction; - const breakingChangeLanguage = breakingChangeResult.language; + /** @type {SdkName} */ + const breakingChangeLanguage = specGenSdkArtifactInfo.language; if (breakingChangeLanguage) { labelName = sdkLabels[`${breakingChangeLanguage}`].breakingChange; } @@ -118,19 +93,8 @@ export async function getLabelAndActionImpl({ } else if (labelActionText === false) { labelAction = LabelAction.Remove; } - - // Get the issue number from the check run - if (!issue_number) { - const { issueNumber } = await getIssueNumber({ head_sha, core, github }); - issue_number = issueNumber; - } - } else { - core.error( - `Failed to fetch artifacts: ${response.status}, ${response.statusText}`, - ); - const errorText = await response.text(); - core.error(`Error details: ${errorText}`); } + if (!labelAction) { core.info("No label action found, defaulting to None"); labelAction = LabelAction.None; diff --git a/.github/workflows/src/set-status.js b/.github/workflows/src/set-status.js new file mode 100644 index 000000000000..743bd52d5561 --- /dev/null +++ b/.github/workflows/src/set-status.js @@ -0,0 +1,196 @@ +// @ts-check + +import { extractInputs } from "./context.js"; +import { CheckConclusion, CheckStatus, CommitStatusState, PER_PAGE_MAX } from "./github.js"; + +// TODO: Add tests +/* v8 ignore start */ +/** + * @param {import('github-script').AsyncFunctionArguments} AsyncFunctionArguments + * @param {string} monitoredWorkflowName + * @param {string} requiredStatusName + * @param {string} overridingLabel + * @returns {Promise} + */ +export default async function 
setStatus( + { github, context, core }, + monitoredWorkflowName, + requiredStatusName, + overridingLabel, +) { + const { owner, repo, head_sha, issue_number } = await extractInputs(github, context, core); + + // Default target is this run itself + let target_url = + `https://github.com/${context.repo.owner}/${context.repo.repo}` + + `/actions/runs/${context.runId}`; + + return await setStatusImpl({ + owner, + repo, + head_sha, + issue_number, + target_url, + github, + core, + monitoredWorkflowName, + requiredStatusName, + overridingLabel, + }); +} +/* v8 ignore stop */ + +/** + * @param {Object} params + * @param {string} params.owner + * @param {string} params.repo + * @param {string} params.head_sha + * @param {number} params.issue_number + * @param {string} params.target_url + * @param {(import("@octokit/core").Octokit & import("@octokit/plugin-rest-endpoint-methods/dist-types/types.js").Api & { paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; })} params.github + * @param {typeof import("@actions/core")} params.core + * @param {string} params.monitoredWorkflowName + * @param {string} params.requiredStatusName + * @param {string} params.overridingLabel + * @returns {Promise} + */ +export async function setStatusImpl({ + owner, + repo, + head_sha, + issue_number, + target_url, + github, + core, + monitoredWorkflowName, + requiredStatusName, + overridingLabel, +}) { + // TODO: Try to extract labels from context (when available) to avoid unnecessary API call + const labels = await github.paginate(github.rest.issues.listLabelsOnIssue, { + owner: owner, + repo: repo, + issue_number: issue_number, + per_page: PER_PAGE_MAX, + }); + const prLabels = labels.map((label) => label.name); + + core.info(`Labels: ${prLabels}`); + + // Parse overriding labels (comma-separated string to array) + const overridingLabelsArray = overridingLabel + ? 
overridingLabel + .split(",") + .map((label) => label.trim()) + .filter((label) => label) // Filter out empty labels + : []; + + // Check if any overriding label is present + const foundOverridingLabel = overridingLabelsArray.find((label) => prLabels.includes(label)); + + if (foundOverridingLabel) { + const description = `Found label '${foundOverridingLabel}'`; + core.info(description); + + const state = CheckConclusion.SUCCESS; + core.info(`Setting status to '${state}' for '${requiredStatusName}'`); + + await github.rest.repos.createCommitStatus({ + owner, + repo, + sha: head_sha, + state, + context: requiredStatusName, + description, + target_url, + }); + + return; + } + + const workflowRuns = await github.paginate(github.rest.actions.listWorkflowRunsForRepo, { + owner, + repo, + event: "pull_request", + head_sha, + per_page: PER_PAGE_MAX, + }); + + core.info("Workflow Runs:"); + workflowRuns.forEach((wf) => { + core.info(`- ${wf.name}: ${wf.conclusion || wf.status}`); + }); + + const targetRuns = workflowRuns + .filter((wf) => wf.name == monitoredWorkflowName) + // Sort by "updated_at" descending + .sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime()); + + // Sorted by "updated_at" descending, so most recent run is at index 0. + // If "targetRuns.length === 0", run will be "undefined", which the following + // code must handle. + const run = targetRuns[0]; + + if (!run) { + console.log(`No workflow runs found for '${monitoredWorkflowName}'.`); + } + + if (run) { + /** + * Update target to the "Analyze Code" run, which contains the meaningful output. 
+ * + * @example https://github.com/mikeharder/azure-rest-api-specs/actions/runs/14509047569 + */ + target_url = run.html_url; + + if (run.conclusion === CheckConclusion.FAILURE) { + /** + * Update target to point directly to the first failed job + * + * @example https://github.com/mikeharder/azure-rest-api-specs/actions/runs/14509047569/job/40703679014?pr=18 + */ + + const jobs = await github.paginate(github.rest.actions.listJobsForWorkflowRun, { + owner, + repo, + run_id: run.id, + per_page: PER_PAGE_MAX, + }); + const failedJobs = jobs.filter((job) => job.conclusion === CheckConclusion.FAILURE); + const failedJob = failedJobs[0]; + if (failedJob?.html_url) { + target_url = `${failedJob.html_url}?pr=${issue_number}`; + } + } + } + + if (run?.status === CheckStatus.COMPLETED) { + const state = + run.conclusion === CheckConclusion.SUCCESS + ? CheckConclusion.SUCCESS + : CheckConclusion.FAILURE; + + core.info(`Setting status to '${state}' for '${requiredStatusName}'`); + await github.rest.repos.createCommitStatus({ + owner, + repo, + sha: head_sha, + state, + context: requiredStatusName, + target_url, + }); + } else { + core.info( + `No workflow runs found for '${monitoredWorkflowName}'. 
Setting status to ${CommitStatusState.PENDING} for required status: ${requiredStatusName}.`, + ); + // Run was not found (not started), or not completed + await github.rest.repos.createCommitStatus({ + owner, + repo, + sha: head_sha, + state: CommitStatusState.PENDING, + context: requiredStatusName, + target_url, + }); + } +} diff --git a/.github/workflows/src/spec-gen-sdk-status.js b/.github/workflows/src/spec-gen-sdk-status.js new file mode 100644 index 000000000000..5da9b3f6bef1 --- /dev/null +++ b/.github/workflows/src/spec-gen-sdk-status.js @@ -0,0 +1,199 @@ +// @ts-check +import { extractInputs } from "./context.js"; +import { getAdoBuildInfoFromUrl, getAzurePipelineArtifact } from "./artifacts.js"; +import { CheckStatus, CommitStatusState, PER_PAGE_MAX, writeToActionsSummary } from "./github.js"; + +/** + * @param {import('github-script').AsyncFunctionArguments} AsyncFunctionArguments + * @returns {Promise} + */ +export default async function setSpecGenSdkStatus({ github, context, core }) { + const inputs = await extractInputs(github, context, core); + const head_sha = inputs.head_sha; + const details_url = inputs.details_url; + if (!details_url || !head_sha) { + throw new Error( + `Required inputs are not valid: details_url:${details_url}, head_sha:${head_sha}`, + ); + } + const owner = inputs.owner; + const repo = inputs.repo; + // Default target is this run itself + let target_url = + `https://github.com/${context.repo.owner}/${context.repo.repo}` + + `/actions/runs/${context.runId}`; + + return await setSpecGenSdkStatusImpl({ + owner, + repo, + head_sha, + target_url, + github, + core, + }); +} + +/** + * @param {Object} params + * @param {string} params.owner + * @param {string} params.repo + * @param {string} params.head_sha + * @param {string} params.target_url + * @param {(import("@octokit/core").Octokit & import("@octokit/plugin-rest-endpoint-methods/dist-types/types.js").Api & { paginate: 
import("@octokit/plugin-paginate-rest").PaginateInterface; })} params.github + * @param {typeof import("@actions/core")} params.core + * @returns {Promise} + */ +export async function setSpecGenSdkStatusImpl({ owner, repo, head_sha, target_url, github, core }) { + const statusName = "SDK Validation Status"; + const checks = await github.paginate(github.rest.checks.listForRef, { + owner, + repo, + ref: head_sha, + per_page: PER_PAGE_MAX, + }); + // Filter sdk generation check runs + const specGenSdkChecks = checks.filter( + (check) => check.app?.name === "Azure Pipelines" && check.name.includes("SDK Validation"), + ); + + core.info(`Found ${specGenSdkChecks.length} check runs from Azure Pipelines:`); + for (const check of specGenSdkChecks) { + core.info(`- ${check.name}: ${check.status} (${check.conclusion})`); + } + + // Check if all SDK generation checks have completed + const allCompleted = + specGenSdkChecks.length > 0 && + specGenSdkChecks.every((check) => check.status === CheckStatus.COMPLETED); + const allIncompletedChecks = specGenSdkChecks.filter( + (check) => check.status !== CheckStatus.COMPLETED, + ); + for (const check of allIncompletedChecks) { + core.info(`incompleted check runs: ${check.name}: ${check.status} (${check.conclusion})`); + } + + if (!allCompleted) { + // At least one check is still running or none found yet, set status to pending + core.info("Some SDK Validation checks are not completed. Setting status to pending."); + + await github.rest.repos.createCommitStatus({ + owner, + repo, + sha: head_sha, + state: CommitStatusState.PENDING, + context: statusName, + description: "Waiting for all SDK Validation checks to complete", + target_url, + }); + } else { + // All checks are completed, check their conclusions + const result = await processResult({ + checkRuns: specGenSdkChecks, + core, + }); + + core.info(`All SDK Validation checks completed. 
Setting status to ${result.state}.`); + + await github.rest.repos.createCommitStatus({ + owner, + repo, + sha: head_sha, + state: result.state, + context: statusName, + description: result.description, + target_url, + }); + } +} + +/** + * @param {Object} params + * @param {Array} params.checkRuns + * @param {typeof import("@actions/core")} params.core + * @returns {Promise<{state: import("./github.js").CommitStatusState, description: string}>} + */ +async function processResult({ checkRuns, core }) { + /** @type {import("./github.js").CommitStatusState} */ + let state = CommitStatusState.SUCCESS; + let specGenSdkFailedRequiredLanguages = ""; + let description = "SDK Validation CI checks succeeded"; + + // Create a summary of the results + let summaryContent = "## SDK Validation CI Checks Result\n\n"; + summaryContent += "| Language | Status | Required Check |\n"; + summaryContent += "|----------|--------|---------------|\n"; + + for (const checkRun of checkRuns) { + core.info(`Processing check run: ${checkRun.name} (${checkRun.conclusion})`); + const buildInfo = getAdoBuildInfoFromUrl(checkRun.details_url); + const ado_project_url = buildInfo.projectUrl; + const ado_build_id = buildInfo.buildId; + let artifactName = "spec-gen-sdk-artifact"; + const artifactFileName = `${artifactName}.json`; + const result = await getAzurePipelineArtifact({ + ado_build_id, + ado_project_url, + artifactName, + artifactFileName, + core, + fallbackToFailedArtifact: true, + token: process.env.ADO_TOKEN, + }); + // Parse the JSON data + if (!result.artifactData) { + throw new Error( + `Artifact '${artifactName}' not found in the build with details_url:${checkRun.details_url}`, + ); + } + const artifactJsonObj = JSON.parse(result.artifactData); + const language = artifactJsonObj.language; + const shortLanguageName = language.split("-").pop(); + const executionResult = artifactJsonObj.result; + const isSpecGenSdkCheckRequired = artifactJsonObj.isSpecGenSdkCheckRequired; + if 
(isSpecGenSdkCheckRequired && executionResult === "failed") { + state = CommitStatusState.FAILURE; + specGenSdkFailedRequiredLanguages += shortLanguageName + ", "; + } + + // Add status emoji + const statusEmoji = + executionResult === "succeeded" + ? "✅" + : executionResult === "failed" + ? "❌" + : executionResult === "warning" + ? "⚠️" + : executionResult === "pending" + ? "⏳" + : "❓"; + + summaryContent += `| ${shortLanguageName} | ${statusEmoji} ${executionResult} | ${isSpecGenSdkCheckRequired} |\n`; + } + + if (state === CommitStatusState.FAILURE) { + specGenSdkFailedRequiredLanguages = specGenSdkFailedRequiredLanguages.replace(/,\s*$/, ""); + description = `SDK Validation failed for ${specGenSdkFailedRequiredLanguages} languages`; + } + + // Add overall result + summaryContent += "\n### Overall Result\n\n"; + summaryContent += + state === CommitStatusState.SUCCESS + ? "✅ All required SDK Validation CI checks passed successfully!" + : `❌ SDK Validation CI checks failed for: ${specGenSdkFailedRequiredLanguages}`; + + // Add next steps + if (state === CommitStatusState.FAILURE) { + summaryContent += + "\n### Next Steps\n\n" + + `Please fix any issues in the the SDK Validation CI checks for languages: ${specGenSdkFailedRequiredLanguages}.`; + } + + // Write to the summary page + await writeToActionsSummary(summaryContent, core); + + return { + state, + description, + }; +} diff --git a/.github/workflows/src/update-labels.js b/.github/workflows/src/update-labels.js index 1ec70e2136ca..fd27486ced4e 100644 --- a/.github/workflows/src/update-labels.js +++ b/.github/workflows/src/update-labels.js @@ -32,29 +32,19 @@ export default async function updateLabels({ github, context, core }) { * @param {(import("@octokit/core").Octokit & import("@octokit/plugin-rest-endpoint-methods/dist-types/types.js").Api & { paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; })} params.github * @param {typeof import("@actions/core")} params.core */ -export async 
function updateLabelsImpl({ - owner, - repo, - issue_number, - run_id, - github, - core, -}) { +export async function updateLabelsImpl({ owner, repo, issue_number, run_id, github, core }) { /** @type {string[]} */ let artifactNames = []; if (run_id) { // List artifacts from a single run_id core.info(`listWorkflowRunArtifacts(${owner}, ${repo}, ${run_id})`); - const artifacts = await github.paginate( - github.rest.actions.listWorkflowRunArtifacts, - { - owner: owner, - repo: repo, - run_id: run_id, - per_page: PER_PAGE_MAX, - }, - ); + const artifacts = await github.paginate(github.rest.actions.listWorkflowRunArtifacts, { + owner: owner, + repo: repo, + run_id: run_id, + per_page: PER_PAGE_MAX, + }); artifactNames = artifacts.map((a) => a.name); } else { @@ -97,10 +87,7 @@ export async function updateLabelsImpl({ core.info(`labelsToAdd: ${JSON.stringify(labelsToAdd)}`); core.info(`labelsToRemove: ${JSON.stringify(labelsToRemove)}`); - if ( - (labelsToAdd.length > 0 || labelsToRemove.length > 0) && - Number.isNaN(issue_number) - ) { + if ((labelsToAdd.length > 0 || labelsToRemove.length > 0) && Number.isNaN(issue_number)) { throw new Error( `Invalid value for 'issue_number':${issue_number}. 
Expected an 'issue-number' artifact created by the workflow run.`, ); @@ -129,11 +116,7 @@ export async function updateLabelsImpl({ name: name, }); } catch (error) { - if ( - error instanceof Error && - "status" in error && - error.status === 404 - ) { + if (error instanceof Error && "status" in error && error.status === 404) { core.info(`Ignoring error: ${error.status} - ${error.message}`); } else { throw error; diff --git a/.github/workflows/src/verify-run-status.js b/.github/workflows/src/verify-run-status.js new file mode 100644 index 000000000000..cee373b602d8 --- /dev/null +++ b/.github/workflows/src/verify-run-status.js @@ -0,0 +1,199 @@ +import { extractInputs } from "./context.js"; +import { getCheckRuns, getCommitStatuses, getWorkflowRuns } from "./github.js"; + +/** + * @typedef {import('@octokit/plugin-rest-endpoint-methods').RestEndpointMethodTypes} RestEndpointMethodTypes + * @typedef {RestEndpointMethodTypes["repos"]["listCommitStatusesForRef"]["response"]["data"]} CommitStatuses + */ + +const SUPPORTED_EVENTS = ["workflow_run", "check_run", "check_suite"]; + +/* v8 ignore start */ +/** + * Given the name of a completed check run name and a completed workflow, verify + * that both have the same conclusion. If conclusions are different, fail the + * action. + * @param {import('github-script').AsyncFunctionArguments} AsyncFunctionArguments + */ +export async function verifyRunStatus({ github, context, core }) { + const checkRunName = process.env.CHECK_RUN_NAME; + if (!checkRunName) { + throw new Error("CHECK_RUN_NAME is not set"); + } + + const commitStatusName = process.env.COMMIT_STATUS_NAME; + const workflowName = process.env.WORKFLOW_NAME; + if (!commitStatusName && !workflowName) { + throw new Error("Neither COMMIT_STATUS nor WORKFLOW_NAME is not set"); + } + + if (!SUPPORTED_EVENTS.some((e) => e === context.eventName)) { + throw new Error( + `Unsupported event: ${context.eventName}. 
Supported events: ${SUPPORTED_EVENTS.join(", ")}`, + ); + } + + if (context.eventName === "check_suite" && context.payload.check_suite.status !== "completed") { + core.setFailed( + `Check suite ${context.payload.check_suite.app.name} is not completed. Cannot evaluate incomplete check suite.`, + ); + return; + } + + return await verifyRunStatusImpl({ + github, + context, + core, + checkRunName, + commitStatusName, + workflowName, + }); +} +/* v8 ignore stop */ + +/** + * @param {Object} params + * @param {import('github-script').AsyncFunctionArguments["github"]} params.github + * @param {import('github-script').AsyncFunctionArguments["context"]} params.context + * @param {import('github-script').AsyncFunctionArguments["core"]} params.core + * @param {string} params.checkRunName + * @param {string} [params.commitStatusName] + * @param {string} [params.workflowName] + */ +export async function verifyRunStatusImpl({ + github, + context, + core, + checkRunName, + commitStatusName, + workflowName, +}) { + if (context.eventName == "check_run") { + const contextRunName = context.payload.check_run.name; + if (contextRunName !== checkRunName) { + core.setFailed( + `Check run name (${contextRunName}) does not match input: ${checkRunName}. Ensure job is filtering by github.event.check_run.name.`, + ); + return; + } + } + + const { head_sha } = await extractInputs(github, context, core); + + let checkRun; + if (context.eventName == "check_run") { + checkRun = context.payload.check_run; + } else { + const checkRuns = await getCheckRuns(github, context, checkRunName, head_sha); + if (checkRuns.length === 0) { + if (context.eventName === "check_suite") { + const message = `Could not locate check run ${checkRunName} in check suite ${context.payload.check_suite.app.name}. Ensure job is filtering by github.event.check_suite.app.name.`; + core.setFailed(message); + return; + } + + core.notice( + `No completed check run with name: ${checkRunName}. 
Not enough information to judge success or failure. Ending with success status.`, + ); + return; + } + + // Use the most recent check run + checkRun = checkRuns[0]; + } + + core.info( + `Check run name: ${checkRun.name}, conclusion: ${checkRun.conclusion}, URL: ${checkRun.html_url}`, + ); + core.debug(`Check run: ${JSON.stringify(checkRun)}`); + + if (commitStatusName) { + core.info(`commitStatusName: ${commitStatusName}`); + + // Get the commit status + let commitStatusContext, commitStatusState, commitStatusTargetUrl; + + // Fetch the commit status from the API + try { + const commitStatuses = await getCommitStatuses(github, context, commitStatusName, head_sha); + if (commitStatuses && commitStatuses.length > 0) { + commitStatusContext = commitStatuses[0].context; + commitStatusState = commitStatuses[0].state; + commitStatusTargetUrl = commitStatuses[0].target_url; + } else { + // Count the commit status as pending if not found and return with no-op + core.notice( + `Commit status is in pending state. Skipping comparison with check run conclusion.`, + ); + return; + } + } catch (error) { + core.setFailed( + `Failed to fetch commit status: ${error instanceof Error ? error.message : String(error)}`, + ); + return; + } + + core.info( + `Commit status context: ${commitStatusContext}, state: ${commitStatusState}, URL: ${commitStatusTargetUrl}`, + ); + + if (commitStatusState === "pending") { + core.notice( + `Commit status is in pending state. Skipping comparison with check run conclusion.`, + ); + return; + } + + // Normalize check run conclusion: treat 'neutral' as 'success' + const normalizedCheckRunConclusion = + checkRun.conclusion === "neutral" ? 
"success" : checkRun.conclusion; + + if (normalizedCheckRunConclusion !== commitStatusState) { + core.setFailed( + `Check run conclusion (${checkRun.conclusion}) does not match commit status state (${commitStatusState})`, + ); + return; + } + + core.notice( + `Conclusions match for check run ${checkRunName} and commit status ${commitStatusName}`, + ); + } + + if (workflowName) { + let workflowRun; + if (context.eventName == "workflow_run") { + workflowRun = context.payload.workflow_run; + } else { + const workflowRuns = await getWorkflowRuns(github, context, workflowName, head_sha); + if (workflowRuns.length === 0) { + core.notice( + `No completed workflow run with name: ${workflowName}. Not enough information to judge success or failure. Ending with success status.`, + ); + return; + } + + // Use the most recent workflow run + workflowRun = workflowRuns[0]; + } + + core.info( + `Workflow run name: ${workflowRun.name}, conclusion: ${workflowRun.conclusion}, URL: ${workflowRun.html_url}`, + ); + core.debug(`Workflow run: ${JSON.stringify(workflowRun)}`); + + // Normalize check run conclusion: treat 'neutral' as 'success' + const normalizedCheckRunConclusion = + checkRun.conclusion === "neutral" ? 
"success" : checkRun.conclusion; + + if (normalizedCheckRunConclusion !== workflowRun.conclusion) { + core.setFailed( + `Check run conclusion (${checkRun.conclusion}) does not match workflow run conclusion (${workflowRun.conclusion})`, + ); + return; + } + + core.notice(`Conclusions match for check run ${checkRunName} and workflow run ${workflowName}`); + } +} diff --git a/.github/workflows/swagger-modelvalidation-code.yaml b/.github/workflows/swagger-modelvalidation-code.yaml new file mode 100644 index 000000000000..7ee3c19b8571 --- /dev/null +++ b/.github/workflows/swagger-modelvalidation-code.yaml @@ -0,0 +1,24 @@ +name: "[TEST-IGNORE] Swagger ModelValidation" + +on: pull_request + +permissions: + contents: read + +jobs: + oav: + name: "[TEST-IGNORE] Swagger ModelValidation" + runs-on: ubuntu-24.04 + + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Setup Node and install deps + uses: ./.github/actions/setup-node-install-deps + + - name: Swagger Model Validation + run: | + npm exec --no -- oav-runner examples diff --git a/.github/workflows/swagger-modelvalidation-status.yaml b/.github/workflows/swagger-modelvalidation-status.yaml new file mode 100644 index 000000000000..1cf8aeb1b67e --- /dev/null +++ b/.github/workflows/swagger-modelvalidation-status.yaml @@ -0,0 +1,35 @@ +name: "[TEST-IGNORE] Swagger ModelValidation - Set Status" + +on: + # Must run on pull_request_target instead of pull_request, since the latter cannot trigger on + # labels from bot accounts in fork PRs. pull_request_target is also more similar to the other + # trigger "workflow_run" -- they are both privileged and run in the target branch and repo -- + # which simplifies implementation. + pull_request_target: + types: + # Run workflow on default types, to update status as quickly as possible. + - opened + - synchronize + - reopened + # Depends on labels, so must re-evaluate whenever a relevant label is manually added or removed. 
+ - labeled + - unlabeled + workflow_run: + workflows: ["\\[TEST-IGNORE\\] Swagger ModelValidation"] + types: [completed] + +permissions: + actions: read + contents: read + issues: read + pull-requests: read + statuses: write + +jobs: + model-validation-status: + name: Set ModelValidation Status + uses: ./.github/workflows/_reusable-set-check-status.yml + with: + monitored_workflow_name: "[TEST-IGNORE] Swagger ModelValidation" + required_check_name: "[TEST-IGNORE] Swagger ModelValidation" + overriding_label: "Approved-ModelValidation" diff --git a/.github/workflows/swagger-semanticvalidation-code.yaml b/.github/workflows/swagger-semanticvalidation-code.yaml new file mode 100644 index 000000000000..532de1554d88 --- /dev/null +++ b/.github/workflows/swagger-semanticvalidation-code.yaml @@ -0,0 +1,24 @@ +name: "[TEST-IGNORE] Swagger SemanticValidation" + +on: pull_request + +permissions: + contents: read + +jobs: + oav: + name: "[TEST-IGNORE] Swagger SemanticValidation" + runs-on: ubuntu-24.04 + + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Setup Node and install deps + uses: ./.github/actions/setup-node-install-deps + + - name: Swagger Semantic Validation + run: | + npm exec --no -- oav-runner specs diff --git a/.github/workflows/swagger-semanticvalidation-status.yaml b/.github/workflows/swagger-semanticvalidation-status.yaml new file mode 100644 index 000000000000..9e4a2552945f --- /dev/null +++ b/.github/workflows/swagger-semanticvalidation-status.yaml @@ -0,0 +1,35 @@ +name: "[TEST-IGNORE] Swagger SemanticValidation - Set Status" + +on: + # Must run on pull_request_target instead of pull_request, since the latter cannot trigger on + # labels from bot accounts in fork PRs. pull_request_target is also more similar to the other + # trigger "workflow_run" -- they are both privileged and run in the target branch and repo -- + # which simplifies implementation. 
+ pull_request_target: + types: + # Run workflow on default types, to update status as quickly as possible. + - opened + - synchronize + - reopened + # Depends on labels, so must re-evaluate whenever a relevant label is manually added or removed. + - labeled + - unlabeled + workflow_run: + workflows: ["\\[TEST-IGNORE\\] Swagger SemanticValidation"] + types: [completed] + +permissions: + actions: read + contents: read + issues: read + pull-requests: read + statuses: write + +jobs: + spec-validation-status: + name: Set SemanticValidation Status + uses: ./.github/workflows/_reusable-set-check-status.yml + with: + monitored_workflow_name: "[TEST-IGNORE] Swagger SemanticValidation" + required_check_name: "[TEST-IGNORE] Swagger SemanticValidation" + overriding_label: "Approved-SemanticValidation" diff --git a/.github/workflows/test/arm-auto-signoff.test.js b/.github/workflows/test/arm-auto-signoff.test.js index 9c43de6a1abd..c27801369f36 100644 --- a/.github/workflows/test/arm-auto-signoff.test.js +++ b/.github/workflows/test/arm-auto-signoff.test.js @@ -1,10 +1,7 @@ import { describe, expect, it } from "vitest"; import { getLabelActionImpl } from "../src/arm-auto-signoff.js"; import { LabelAction } from "../src/label.js"; -import { - createMockCore, - createMockGithub as createMockGithubBase, -} from "./mocks.js"; +import { createMockCore, createMockGithub as createMockGithubBase } from "./mocks.js"; const core = createMockCore(); diff --git a/.github/workflows/test/arm-incremental-typespec.test.js b/.github/workflows/test/arm-incremental-typespec.test.js index 6a3ba135949a..668613e1acd1 100644 --- a/.github/workflows/test/arm-incremental-typespec.test.js +++ b/.github/workflows/test/arm-incremental-typespec.test.js @@ -1,17 +1,31 @@ -import { describe, expect, it, vi } from "vitest"; -import * as changedFiles from "../../src/changed-files.js"; -import * as git from "../../src/git.js"; +import { relative, resolve } from "path"; +import { afterEach, describe, expect, it, 
vi } from "vitest"; +import { repoRoot } from "../../shared/test/repo.js"; + +vi.mock("simple-git", () => ({ + simpleGit: vi.fn().mockReturnValue({ + raw: vi.fn().mockResolvedValue(""), + show: vi.fn().mockResolvedValue(""), + }), +})); + +import * as simpleGit from "simple-git"; +import * as changedFiles from "../../shared/src/changed-files.js"; import { contosoReadme, swaggerHandWritten, swaggerTypeSpecGenerated, -} from "../../test/examples.js"; +} from "../../shared/test/examples.js"; import incrementalTypeSpec from "../src/arm-incremental-typespec.js"; import { createMockCore } from "./mocks.js"; const core = createMockCore(); describe("incrementalTypeSpec", () => { + afterEach(() => { + vi.clearAllMocks(); + }); + it("rejects if inputs null", async () => { await expect(incrementalTypeSpec({})).rejects.toThrow(); }); @@ -28,38 +42,31 @@ describe("incrementalTypeSpec", () => { vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); - const showSpy = vi.spyOn(git, "show").mockResolvedValue(swaggerHandWritten); + const showSpy = vi.mocked(simpleGit.simpleGit().show).mockResolvedValue(swaggerHandWritten); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${swaggerPath}`]); }); it("returns false if changed files add a new RP", async () => { - const specDir = - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; + const specDir = "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; const swaggerPath = `${specDir}/preview/2021-10-01-preview/contoso.json`; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); const showSpy = vi - .spyOn(git, "show") + .mocked(simpleGit.simpleGit().show) .mockResolvedValue(swaggerTypeSpecGenerated); // "git ls-tree" returns "" if the spec folder doesn't exist in the base branch - const lsTreeSpy = vi.spyOn(git, 
"lsTree").mockResolvedValue(""); + const rawSpy = vi.mocked(simpleGit.simpleGit().raw).mockResolvedValue(""); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${swaggerPath}`]); - expect(lsTreeSpy).toBeCalledWith( - "HEAD^", - specDir, - expect.objectContaining({ - args: "-r --name-only", - }), - ); + expect(rawSpy).toBeCalledWith(["ls-tree", "-r", "--name-only", "HEAD^", specDir]); }); it("returns false if swagger deleted", async () => { @@ -69,42 +76,38 @@ describe("incrementalTypeSpec", () => { vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); const showSpy = vi - .spyOn(git, "show") - .mockRejectedValue( - new Error("path contoso.json does not exist in 'HEAD'"), - ); + .mocked(simpleGit.simpleGit().show) + .mockRejectedValue(new Error("path contoso.json does not exist in 'HEAD'")); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${swaggerPath}`]); }); it("returns false if readme deleted", async () => { - const readmePath = - "specification/contosowidgetmanager/resource-manager/readme.md"; + const readmePath = "specification/contosowidgetmanager/resource-manager/readme.md"; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([readmePath]); const showSpy = vi - .spyOn(git, "show") + .mocked(simpleGit.simpleGit().show) .mockRejectedValue(new Error("path readme.md does not exist in 'HEAD'")); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", readmePath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${readmePath}`]); }); it("returns false if readme contains no input-files", async () => { - const readmePath = - "specification/contosowidgetmanager/resource-manager/readme.md"; + const readmePath = 
"specification/contosowidgetmanager/resource-manager/readme.md"; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([readmePath]); - const showSpy = vi.spyOn(git, "show").mockResolvedValue(""); + const showSpy = vi.mocked(simpleGit.simpleGit().show).mockResolvedValue(""); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", readmePath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${readmePath}`]); }); it("returns false if swagger cannot be parsed as JSON", async () => { @@ -113,40 +116,33 @@ describe("incrementalTypeSpec", () => { vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); - const showSpy = vi - .spyOn(git, "show") - .mockResolvedValue("not } valid { json"); + const showSpy = vi.mocked(simpleGit.simpleGit().show).mockResolvedValue("not } valid { json"); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${swaggerPath}`]); }); it("returns false if tsp conversion", async () => { - const specDir = - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; + const specDir = "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; const swaggerPath = `${specDir}/preview/2021-10-01-preview/contoso.json`; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); const showSpy = vi - .spyOn(git, "show") - .mockImplementation((treeIsh) => - treeIsh == "HEAD" ? swaggerTypeSpecGenerated : swaggerHandWritten, + .mocked(simpleGit.simpleGit().show) + .mockImplementation(async ([treePath]) => + treePath.split(":")[0] == "HEAD" ? 
swaggerTypeSpecGenerated : swaggerHandWritten, ); - const lsTreeSpy = vi.spyOn(git, "lsTree").mockResolvedValue(swaggerPath); + const lsTreeSpy = vi.mocked(simpleGit.simpleGit().raw).mockResolvedValue(swaggerPath); await expect(incrementalTypeSpec({ core })).resolves.toBe(false); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); - expect(showSpy).toBeCalledWith("HEAD^", swaggerPath, expect.anything()); + expect(showSpy).toHaveBeenCalledWith([`HEAD:${swaggerPath}`]); + expect(showSpy).toHaveBeenCalledWith([`HEAD^:${swaggerPath}`]); - expect(lsTreeSpy).toBeCalledWith( - "HEAD^", - specDir, - expect.objectContaining({ args: "-r --name-only" }), - ); + expect(lsTreeSpy).toBeCalledWith(["ls-tree", "-r", "--name-only", "HEAD^", specDir]); }); it("throws if git show for swagger returns unknown error", async () => { @@ -155,93 +151,83 @@ describe("incrementalTypeSpec", () => { vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); - const showSpy = vi.spyOn(git, "show").mockRejectedValue("string error"); + const showSpy = vi.mocked(simpleGit.simpleGit().show).mockRejectedValue("string error"); await expect(incrementalTypeSpec({ core })).rejects.toThrowError(); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${swaggerPath}`]); }); it("throws if git show for readme returns unknown error", async () => { - const readmePath = - "specification/contosowidgetmanager/resource-manager/readme.md"; + const readmePath = "specification/contosowidgetmanager/resource-manager/readme.md"; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([readmePath]); - const showSpy = vi.spyOn(git, "show").mockRejectedValue("string error"); + const showSpy = vi.mocked(simpleGit.simpleGit().show).mockRejectedValue("string error"); await expect(incrementalTypeSpec({ core })).rejects.toThrowError(); - expect(showSpy).toBeCalledWith("HEAD", readmePath, expect.anything()); + 
expect(showSpy).toBeCalledWith([`HEAD:${readmePath}`]); }); it("returns true if changed files are incremental changes to an existing TypeSpec RP swagger", async () => { - const specDir = - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; + const specDir = "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; const swaggerPath = `${specDir}/preview/2021-10-01-preview/contoso.json`; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([swaggerPath]); const showSpy = vi - .spyOn(git, "show") + .mocked(simpleGit.simpleGit().show) .mockResolvedValue(swaggerTypeSpecGenerated); - const lsTreeSpy = vi.spyOn(git, "lsTree").mockResolvedValue(swaggerPath); + const lsTreeSpy = vi.mocked(simpleGit.simpleGit().raw).mockResolvedValue(swaggerPath); await expect(incrementalTypeSpec({ core })).resolves.toBe(true); - expect(showSpy).toBeCalledWith("HEAD", swaggerPath, expect.anything()); - expect(showSpy).toBeCalledWith("HEAD^", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${swaggerPath}`]); + expect(showSpy).toBeCalledWith([`HEAD^:${swaggerPath}`]); - expect(lsTreeSpy).toBeCalledWith( - "HEAD^", - specDir, - expect.objectContaining({ - args: "-r --name-only", - }), - ); + expect(lsTreeSpy).toBeCalledWith(["ls-tree", "-r", "--name-only", "HEAD^", specDir]); }); it("returns true if changed files are incremental changes to an existing TypeSpec RP readme", async () => { - const specDir = - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; + const specDir = "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; const swaggerPath = `${specDir}/preview/2021-10-01-preview/contoso.json`; - const readmePath = - "specification/contosowidgetmanager/resource-manager/readme.md"; + const readmePath = "specification/contosowidgetmanager/resource-manager/readme.md"; vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([readmePath]); - const showSpy = vi - .spyOn(git, "show") 
- .mockImplementation(async (_treeIsh, path) => { - if (path === swaggerPath) { - return swaggerTypeSpecGenerated; - } else if (path === readmePath) { - return contosoReadme; - } else { - throw new Error("does not exist"); - } - }); - - const lsTreeSpy = vi.spyOn(git, "lsTree").mockResolvedValue(swaggerPath); + const showSpy = vi.mocked(simpleGit.simpleGit().show).mockImplementation(async ([treePath]) => { + const path = treePath.split(":")[1]; + if (path === swaggerPath) { + return swaggerTypeSpecGenerated; + } else if (path === readmePath) { + return contosoReadme; + } else { + throw new Error("does not exist"); + } + }); + + const lsTreeSpy = vi.mocked(simpleGit.simpleGit().raw).mockResolvedValue(swaggerPath); await expect(incrementalTypeSpec({ core })).resolves.toBe(true); - expect(showSpy).toBeCalledWith("HEAD", readmePath, expect.anything()); - expect(showSpy).toBeCalledWith("HEAD^", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD:${readmePath}`]); + expect(showSpy).toBeCalledWith([`HEAD^:${swaggerPath}`]); - expect(lsTreeSpy).toHaveBeenCalledWith( + expect(lsTreeSpy).toHaveBeenCalledWith([ + "ls-tree", + "-r", + "--name-only", "HEAD^", - specDir, - expect.objectContaining({ args: "-r --name-only" }), - ); + relative(repoRoot, resolve(repoRoot, specDir)), + ]); }); it("returns true if changed files are incremental changes to an existing TypeSpec RP example", async () => { - const specDir = - "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; + const specDir = "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso"; const swaggerPath = `${specDir}/preview/2021-10-01-preview/contoso.json`; const examplesPath = "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"; @@ -249,19 +235,15 @@ describe("incrementalTypeSpec", () => { vi.spyOn(changedFiles, "getChangedFiles").mockResolvedValue([examplesPath]); const showSpy = vi - 
.spyOn(git, "show") + .mocked(simpleGit.simpleGit().show) .mockResolvedValue(swaggerTypeSpecGenerated); - const lsTreeSpy = vi.spyOn(git, "lsTree").mockResolvedValue(swaggerPath); + const lsTreeSpy = vi.mocked(simpleGit.simpleGit().raw).mockResolvedValue(swaggerPath); await expect(incrementalTypeSpec({ core })).resolves.toBe(true); - expect(showSpy).toBeCalledWith("HEAD^", swaggerPath, expect.anything()); + expect(showSpy).toBeCalledWith([`HEAD^:${swaggerPath}`]); - expect(lsTreeSpy).toHaveBeenCalledWith( - "HEAD^", - specDir, - expect.objectContaining({ args: "-r --name-only" }), - ); + expect(lsTreeSpy).toHaveBeenCalledWith(["ls-tree", "-r", "--name-only", "HEAD^", specDir]); }); }); diff --git a/.github/workflows/test/artifacts.test.js b/.github/workflows/test/artifacts.test.js new file mode 100644 index 000000000000..42cac2507539 --- /dev/null +++ b/.github/workflows/test/artifacts.test.js @@ -0,0 +1,768 @@ +import { describe, expect, it, vi, beforeEach } from "vitest"; +import { + getAzurePipelineArtifact, + getAdoBuildInfoFromUrl, + fetchFailedArtifact, +} from "../src/artifacts.js"; +import { createMockCore } from "./mocks.js"; + +// Mock dependencies +vi.mock("../src/context.js", () => ({ + extractInputs: vi.fn(), +})); + +// Mock global fetch +global.fetch = vi.fn(); +const mockCore = createMockCore(); + +describe("getAzurePipelineArtifact function", () => { + const inputs = { + ado_build_id: "12345", + ado_project_url: "https://dev.azure.com/project", + artifactName: "spec-gen-sdk-artifact", + artifactFileName: "spec-gen-sdk-artifact.json", + core: mockCore, + }; + + // Reset mocks before each test + beforeEach(() => { + vi.resetAllMocks(); + }); + + it("should pass headers to fetchFailedArtifact when using fallback mechanism", async () => { + const testToken = "test-auth-token"; + const expectedHeaders = { + "Content-Type": "application/json", + Authorization: `Bearer ${testToken}`, + }; + + // Mock initial fetch failure with 404 + const 
mockInitialResponse = { + ok: false, + status: 404, + statusText: "Not Found", + text: vi.fn().mockResolvedValue("Artifact not found"), + }; + + // Mock list artifacts response + const mockListResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + value: [ + { + name: "spec-gen-sdk-artifact-FailedAttempt1", + resource: { downloadUrl: "https://example.com/download2" }, + }, + ], + }), + status: 200, + statusText: "OK", + }; + + // Mock response for fetching specific failed artifact + const mockFailedArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { + downloadUrl: "https://example.com/failed-artifact-download?format=zip", + }, + }), + status: 200, + statusText: "OK", + }; + + // Mock successful download of artifact content + const mockContentResponse = { + ok: true, + text: vi.fn().mockResolvedValue(JSON.stringify({ failedData: true })), + }; + + // Setup fetch to capture headers and return appropriate responses + global.fetch.mockImplementation((url, options) => { + // For all calls, verify the headers include the authorization token + if (options && options.headers) { + expect(options.headers).toEqual(expectedHeaders); + } + + // First attempted artifact request with 404 + if (url.includes(`artifacts?artifactName=${inputs.artifactName}&api-version=7.0`)) { + return mockInitialResponse; + } + // List all artifacts request + else if (url.includes("artifacts?api-version=7.0") && !url.includes("artifactName=")) { + return mockListResponse; + } + // Request for failed artifact + else if (url.includes("artifactName=spec-gen-sdk-artifact-FailedAttempt1")) { + return mockFailedArtifactResponse; + } + // Content download request + else if (url.includes("format=file&subPath=")) { + return mockContentResponse; + } + + return { + ok: false, + status: 404, + statusText: "URL not matched in test mock", + text: vi.fn().mockResolvedValue("URL not matched in test mock"), + }; + }); + + // Call function with fallbackToFailedArtifact set to 
true + const result = await getAzurePipelineArtifact({ + ...inputs, + token: testToken, + fallbackToFailedArtifact: true, + }); + + // Verify result contains the data from the failed artifact + expect(result).toEqual({ + artifactData: JSON.stringify({ + failedData: true, + }), + }); + + // Verify fetch was called multiple times with the auth headers + expect(global.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expectedHeaders, + }), + ); + }); + + it("should include authorization header when token is provided", async () => { + const testToken = "test-auth-token"; + + // Mock fetch responses + const mockArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { + downloadUrl: "https://dev.azure.com/download?format=zip", + }, + }), + text: vi.fn(), + }; + + const mockContentResponse = { + ok: true, + text: vi.fn().mockResolvedValue(JSON.stringify({ labelAction: true })), + }; + + // Setup fetch with a spy to capture the headers + global.fetch.mockImplementation((url, options) => { + if (url.includes("artifacts?artifactName=")) { + // Verify headers contain Authorization + expect(options.headers).toHaveProperty("Authorization", `Bearer ${testToken}`); + return mockArtifactResponse; + } else { + // Verify headers contain Authorization for the content download as well + expect(options.headers).toHaveProperty("Authorization", `Bearer ${testToken}`); + return mockContentResponse; + } + }); + + // Call function with token + const result = await getAzurePipelineArtifact({ + ...inputs, + token: testToken, + }); + + // Verify result + expect(result).toEqual({ + artifactData: JSON.stringify({ labelAction: true }), + }); + + // Verify fetch was called with the right headers + expect(global.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + "Content-Type": "application/json", + Authorization: `Bearer ${testToken}`, + }), + }), + ); + }); + + 
it("should handle API failure", async () => { + // Mock fetch failure + global.fetch.mockResolvedValue({ + ok: false, + status: 500, + statusText: "Server Error", + text: vi.fn().mockResolvedValue("Artifact not found"), + }); + + // Call function + const result = await getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + }); + + // Verify result uses default values when artifact fetch fails + expect(result).toEqual({ + artifactData: "", + }); + + // Verify error was logged + expect(mockCore.error).toHaveBeenCalled(); + }); + + it("should complete without op when artifact does not exist", async () => { + // Mock fetch failure + global.fetch.mockResolvedValue({ + ok: false, + status: 404, + statusText: "Not Found", + text: vi.fn().mockResolvedValue("Artifact not found"), + }); + + // Call function + const result = await getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + }); + + // Verify result uses default values when artifact fetch fails + expect(result).toEqual({ + artifactData: "", + }); + }); + + it("should fallback to failed artifacts when specified and primary artifact not found", async () => { + // Mock initial fetch failure with 404 + const mockInitialResponse = { + ok: false, + status: 404, + statusText: "Not Found", + text: vi.fn().mockResolvedValue("Artifact not found"), + }; + + // Mock list artifacts response + const mockListResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + value: [ + { + name: "spec-gen-sdk-artifact-FailedAttempt2", + resource: { downloadUrl: "https://example.com/download1" }, + }, + { + name: "spec-gen-sdk-artifact-FailedAttempt1", + resource: { downloadUrl: "https://example.com/download2" }, + }, + ], + }), 
+ status: 200, + statusText: "OK", + }; + + // Mock response for fetching specific failed artifact + const mockFailedArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { + downloadUrl: "https://example.com/failed-artifact-download?format=zip", + }, + }), + status: 200, + statusText: "OK", + }; + + // Mock successful download of artifact content + const mockContentResponse = { + ok: true, + text: vi.fn().mockResolvedValue( + JSON.stringify({ + failedData: true, + }), + ), + }; + + // Setup fetch to return different responses based on the URL + global.fetch.mockImplementation((url) => { + // First attempted artifact request with 404 + if (url.includes(`artifacts?artifactName=${inputs.artifactName}&api-version=7.0`)) { + return mockInitialResponse; + } + // List all artifacts request + else if (url.includes("artifacts?api-version=7.0") && !url.includes("artifactName=")) { + return mockListResponse; + } + // Request for failed artifact - notice we use the first item from mockListResponse + else if (url.includes("artifactName=spec-gen-sdk-artifact-FailedAttempt2")) { + return mockFailedArtifactResponse; + } + // Content download request + else if (url.includes("format=file&subPath=")) { + return mockContentResponse; + } + return { + ok: false, + status: 404, + statusText: "URL not matched in test mock", + text: vi.fn().mockResolvedValue("URL not matched in test mock"), + }; + }); + + // Call function with fallbackToFailedArtifact set to true + const result = await getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + fallbackToFailedArtifact: true, + }); + + // Verify result contains the data from the failed artifact + expect(result).toEqual({ + artifactData: JSON.stringify({ + failedData: true, + }), + }); + }); + + it("should throw error if resource is empty from the artifact api 
response", async () => { + // Mock artifact responses with 'remove' action + const mockArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({}), + }; + + // Setup fetch to return different responses for each call + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?artifactName=")) { + return mockArtifactResponse; + } + }); + + // Call function and expect it to throw + await expect( + getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + }), + ).rejects.toThrow(); + }); + + it("should throw error if missing download url from the artifact api response", async () => { + // Mock artifact responses with 'remove' action + const mockArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: {}, + }), + }; + + // Setup fetch to return different responses for each call + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?artifactName=")) { + return mockArtifactResponse; + } + }); + + // Call function and expect it to throw + await expect( + getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + }), + ).rejects.toThrow(); + }); + + it("should throw error when fail to fetch artifact content", async () => { + // Mock fetch responses + // First fetch - artifact metadata + const mockArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { + downloadUrl: "https://dev.azure.com/download?format=zip", + }, + }), + text: vi.fn(), + }; + + // Second fetch - artifact content + const mockContentResponse = { + ok: false, + status: 404, + statusText: "Not Found", + text: vi.fn().mockResolvedValue("Artifact not found"), + }; + + // Setup fetch to return different responses 
for each call + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?artifactName=")) { + return mockArtifactResponse; + } else { + return mockContentResponse; + } + }); + + // Call function and expect it to throw + await expect( + getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + }), + ).rejects.toThrow(); + }); + + it("should handle exception during processing", async () => { + // Mock fetch to throw an error + global.fetch.mockImplementation(() => { + throw new Error("Network error"); + }); + + // Start the async operation that will retry + const promise = getAzurePipelineArtifact({ + ado_build_id: inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + // Change default retry delay from 1000ms to 1ms to reduce test time + retryOptions: { initialDelayMs: 1 }, + }); + + // Now expect the promise to reject + await expect(promise).rejects.toThrow("Network error"); + }, 10000); + + it("should return artifact content", async () => { + // Mock fetch responses + // First fetch - artifact metadata + const mockArtifactResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { + downloadUrl: "https://dev.azure.com/download?format=zip", + }, + }), + text: vi.fn(), + }; + + // Second fetch - artifact content + const mockContentResponse = { + ok: true, + text: vi.fn().mockResolvedValue( + JSON.stringify({ + labelAction: true, + }), + ), + }; + + // Setup fetch to return different responses for each call + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?artifactName=")) { + return mockArtifactResponse; + } else { + return mockContentResponse; + } + }); + + // Call function + const result = await getAzurePipelineArtifact({ + ado_build_id: 
inputs.ado_build_id, + ado_project_url: inputs.ado_project_url, + artifactName: inputs.artifactName, + artifactFileName: inputs.artifactFileName, + core: inputs.core, + }); + // Verify result + expect(result).toEqual({ + artifactData: JSON.stringify({ + labelAction: true, + }), + }); + }); +}); + +describe("getAdoBuildInfoFromUrl function", () => { + // Reset mocks before each test + beforeEach(() => { + vi.resetAllMocks(); + }); + + it("should extract project URL and build ID from a valid URL", () => { + const buildUrl = "https://dev.azure.com/azure-sdk/_build/results?buildId=12345&view=logs"; + const result = getAdoBuildInfoFromUrl(buildUrl); + + expect(result).toEqual({ + projectUrl: "https://dev.azure.com/azure-sdk", + buildId: "12345", + }); + }); + + it("should extract build ID when it's not the first parameter", () => { + const buildUrl = "https://dev.azure.com/azure-sdk/_build/results?view=logs&buildId=54321"; + const result = getAdoBuildInfoFromUrl(buildUrl); + + expect(result).toEqual({ + projectUrl: "https://dev.azure.com/azure-sdk", + buildId: "54321", + }); + }); + + it("should throw error when URL format is invalid", () => { + const invalidUrl = "https://dev.azure.com/azure-sdk/wrong-format"; + + expect(() => { + getAdoBuildInfoFromUrl(invalidUrl); + }).toThrow("Could not extract build ID or project URL from the URL"); + }); + + it("should throw error when buildId is missing", () => { + const invalidUrl = "https://dev.azure.com/azure-sdk/_build/results?view=logs"; + + expect(() => { + getAdoBuildInfoFromUrl(invalidUrl); + }).toThrow("Could not extract build ID or project URL from the URL"); + }); +}); + +describe("fetchFailedArtifact function", () => { + const defaultParams = { + ado_build_id: "12345", + ado_project_url: "https://dev.azure.com/testorg/testproject", + artifactName: "spec-gen-sdk-artifact", + core: mockCore, + retryOptions: {}, + }; + + // Reset mocks before each test + beforeEach(() => { + vi.resetAllMocks(); + }); + + it("should use 
the provided headers when making requests", async () => { + const customHeaders = { + "Content-Type": "application/json", + Authorization: "Bearer test-token", + "Custom-Header": "test-value", + }; + + // Setup mock responses + const mockListResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + value: [ + { + name: "spec-gen-sdk-artifact-FailedAttempt1", + resource: { downloadUrl: "https://example.com/download1" }, + }, + ], + }), + status: 200, + statusText: "OK", + }; + + const mockFetchResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { downloadUrl: "https://example.com/artifact-download" }, + }), + status: 200, + statusText: "OK", + }; + + // Setup fetch with a spy to capture the headers + global.fetch.mockImplementation((url, options) => { + // Verify that the custom headers are included in all requests + expect(options.headers).toEqual(customHeaders); + + if (url.includes("artifacts?api-version") && !url.includes("artifactName=")) { + return mockListResponse; + } else { + return mockFetchResponse; + } + }); + + // Call the function with custom headers + const response = await fetchFailedArtifact({ + ...defaultParams, + headers: customHeaders, + }); + + // Verify response is correct + expect(response).toBe(mockFetchResponse); + + // Verify fetch was called with custom headers + expect(global.fetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: customHeaders, + }), + ); + }); + + it("should fetch the failed artifact successfully", async () => { + // Setup mock responses + // First call to list artifacts + const mockArtifacts = { + value: [ + { + name: "spec-gen-sdk-artifact-FailedAttempt2", + resource: { downloadUrl: "https://example.com/download1" }, + }, + { + name: "spec-gen-sdk-artifact-FailedAttempt1", + resource: { downloadUrl: "https://example.com/download2" }, + }, + { + name: "other-artifact", + resource: { downloadUrl: "https://example.com/download3" }, + }, + ], + }; + + // Mock 
responses for API calls + const mockListResponse = { + ok: true, + json: vi.fn().mockResolvedValue(mockArtifacts), + status: 200, + statusText: "OK", + }; + + const mockFetchResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { downloadUrl: "https://example.com/artifact-download" }, + }), + status: 200, + statusText: "OK", + }; + + // Setup fetch to return different responses for each call + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?api-version")) { + return mockListResponse; + } else if (url.includes("artifactName=spec-gen-sdk-artifact-FailedAttempt2")) { + return mockFetchResponse; + } + }); + + // Call the function + const response = await fetchFailedArtifact(defaultParams); + + // Verify response is correct + expect(response).toBe(mockFetchResponse); + }); + + it("should return 404 when no matching artifacts are found", async () => { + // Mock response with no matching artifacts + const mockListResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + value: [ + { + name: "other-artifact-1", + resource: { downloadUrl: "https://example.com/download1" }, + }, + { + name: "other-artifact-2", + resource: { downloadUrl: "https://example.com/download2" }, + }, + ], + }), + status: 200, + statusText: "OK", + }; + + global.fetch.mockResolvedValue(mockListResponse); + + // Call the function and expect it to return a 404 response + const response = await fetchFailedArtifact(defaultParams); + expect(response.ok).toBe(false); + expect(response.status).toBe(404); + expect(response.statusText).toBe( + `No artifacts found with name containing ${defaultParams.artifactName}`, + ); + }); + + it("should throw an error when listing artifacts fails", async () => { + // Mock a failed response + const mockErrorResponse = { + ok: false, + status: 500, + statusText: "Internal Server Error", + }; + + global.fetch.mockResolvedValue(mockErrorResponse); + + // Call the function and expect it to throw + await 
expect(fetchFailedArtifact(defaultParams)).rejects.toThrow( + `Failed to fetch artifacts: 500, Internal Server Error`, + ); + }); + + it("should sort artifacts in descending order and select the first one", async () => { + // Mock response with artifacts in unsorted order + const mockListResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + value: [ + { + name: "spec-gen-sdk-artifact-FailedAttempt1", + resource: { downloadUrl: "https://example.com/download1" }, + }, + { + name: "spec-gen-sdk-artifact-FailedAttempt3", + resource: { downloadUrl: "https://example.com/download3" }, + }, + { + name: "spec-gen-sdk-artifact-FailedAttempt2", + resource: { downloadUrl: "https://example.com/download2" }, + }, + ], + }), + status: 200, + statusText: "OK", + }; + + // Mock response for fetching specific artifact + const mockFetchResponse = { + ok: true, + json: vi.fn().mockResolvedValue({ + resource: { downloadUrl: "https://example.com/artifact-download" }, + }), + status: 200, + statusText: "OK", + }; + + // Setup fetch to return different responses for each call + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?api-version")) { + return mockListResponse; + } else if (url.includes("artifactName=spec-gen-sdk-artifact-FailedAttempt3")) { + return mockFetchResponse; + } + }); + + // Call the function + const response = await fetchFailedArtifact(defaultParams); + + // Verify response is correct + expect(response).toBe(mockFetchResponse); + }); +}); diff --git a/.github/workflows/test/context.test.js b/.github/workflows/test/context.test.js index 0ea09bd0bb7c..baedbdbcfa8b 100644 --- a/.github/workflows/test/context.test.js +++ b/.github/workflows/test/context.test.js @@ -12,9 +12,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(null, context, createMockCore()), - ).rejects.toThrow(); + await expect(extractInputs(null, context, createMockCore())).rejects.toThrow(); }); it("pull_request", async () => { @@ -36,9 +34,7 @@ 
describe("extractInputs", () => { }, }; - await expect( - extractInputs(null, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -75,20 +71,23 @@ describe("extractInputs", () => { run_id: NaN, }; - await expect( - extractInputs(null, context, createMockCore()), - ).resolves.toEqual(expected); + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual(expected); context.payload.action = "unlabeled"; - await expect( - extractInputs(null, context, createMockCore()), - ).resolves.toEqual(expected); + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual(expected); + + context.payload.action = "opened"; + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual(expected); - // Action not yet supported context.payload.action = "synchronize"; - await expect( - extractInputs(null, context, createMockCore()), - ).rejects.toThrow(); + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual(expected); + + context.payload.action = "reopened"; + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual(expected); + + // Action not yet supported + context.payload.action = "assigned"; + await expect(extractInputs(null, context, createMockCore())).rejects.toThrow(); }); it("issue_comment:edited", async () => { @@ -113,9 +112,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -143,9 +140,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(null, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(null, context, 
createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "", @@ -162,9 +157,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(null, context, createMockCore()), - ).rejects.toThrow(); + await expect(extractInputs(null, context, createMockCore())).rejects.toThrow(); }); it("workflow_run:completed:pull_request (same repo)", async () => { @@ -187,9 +180,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(null, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(null, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -198,7 +189,7 @@ describe("extractInputs", () => { }); }); - it.each([0, 1, 2])( + it.each([0, 1, 2, 3])( "workflow_run:completed:pull_request (fork repo, %s PRs)", async (numPullRequests) => { const context = { @@ -216,6 +207,7 @@ describe("extractInputs", () => { head_sha: "abc123", id: 456, repository: { + id: 1234, name: "TestRepoName", owner: { login: "TestRepoOwnerLogin", @@ -227,23 +219,40 @@ describe("extractInputs", () => { }; const github = createMockGithub(); - github.rest.repos.listPullRequestsAssociatedWithCommit.mockImplementation( - async (args) => { - console.log(JSON.stringify(args)); - return { - data: [{ number: 123 }, { number: 124 }].slice(0, numPullRequests), - }; - }, - ); + github.rest.repos.listPullRequestsAssociatedWithCommit.mockImplementation(async (args) => { + console.log(JSON.stringify(args)); + return { + data: [ + { + base: { + repo: { id: 1234 }, + }, + number: 123, + }, + // Ensure PRs to other repos are excluded + { + base: { + repo: { id: 4567 }, + }, + number: 1, + }, + // Multiple PRs to triggering repo still causes an error (TODO: #33418) + { + base: { + repo: { id: 1234 }, + }, + number: 124, + }, + ].slice(0, numPullRequests), + }; + }); if (numPullRequests === 0) { github.rest.search.issuesAndPullRequests.mockResolvedValue({ 
data: { total_count: 0, items: [] }, }); - await expect( - extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -255,9 +264,7 @@ describe("extractInputs", () => { data: { total_count: 1, items: [{ number: 789 }] }, }); - await expect( - extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -266,10 +273,9 @@ describe("extractInputs", () => { }); expect(github.rest.search.issuesAndPullRequests).toHaveBeenCalled(); - } else if (numPullRequests === 1) { - await expect( - extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + } else if (numPullRequests === 1 || numPullRequests === 2) { + // Second PR is to a different repo, so expect same behavior with or without it + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -277,14 +283,13 @@ describe("extractInputs", () => { run_id: 456, }); } else { - await expect( - extractInputs(github, context, createMockCore()), - ).rejects.toThrow("Unexpected number of pull requests"); + // Multiple PRs to triggering repo still causes an error (TODO: #33418) + await expect(extractInputs(github, context, createMockCore())).rejects.toThrow( + "Unexpected number of pull requests", + ); } - expect( - github.rest.repos.listPullRequestsAssociatedWithCommit, - ).toHaveBeenCalledWith({ + expect(github.rest.repos.listPullRequestsAssociatedWithCommit).toHaveBeenCalledWith({ owner: "TestRepoOwnerLoginFork", repo: "TestRepoName", commit_sha: "abc123", @@ -317,9 +322,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(github, context, createMockCore()), 
- ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -330,16 +333,14 @@ describe("extractInputs", () => { github.rest.actions.listWorkflowRunArtifacts.mockResolvedValue({ data: { artifacts: [{ name: "issue-number=not-a-number" }] }, }); - await expect( - extractInputs(github, context, createMockCore()), - ).rejects.toThrow(/invalid issue-number/i); + await expect(extractInputs(github, context, createMockCore())).rejects.toThrow( + /invalid issue-number/i, + ); github.rest.actions.listWorkflowRunArtifacts.mockResolvedValue({ data: { artifacts: [] }, }); - await expect( - extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -359,9 +360,7 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(null, context, createMockCore()), - ).rejects.toThrow(); + await expect(extractInputs(null, context, createMockCore())).rejects.toThrow(); }); it("workflow_run:completed:check_run", async () => { @@ -391,9 +390,7 @@ describe("extractInputs", () => { github.rest.actions.listWorkflowRunArtifacts.mockResolvedValue({ data: { artifacts: [] }, }); - await expect( - extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", head_sha: "abc123", @@ -409,8 +406,7 @@ describe("extractInputs", () => { payload: { action: "completed", check_run: { - details_url: - "https://dev.azure.com/abc/123-456/_build/results?buildId=56789", + details_url: "https://dev.azure.com/abc/123-456/_build/results?buildId=56789", head_sha: "abc123", }, repository: { @@ -422,16 +418,13 @@ describe("extractInputs", () => { }, }; - await expect( - 
extractInputs(github, context, createMockCore()), - ).resolves.toEqual({ + await expect(extractInputs(github, context, createMockCore())).resolves.toEqual({ owner: "TestRepoOwnerLogin", repo: "TestRepoName", issue_number: NaN, head_sha: "abc123", run_id: NaN, - ado_build_id: "56789", - ado_project_url: "https://dev.azure.com/abc/123-456", + details_url: "https://dev.azure.com/abc/123-456/_build/results?buildId=56789", }); }); @@ -442,11 +435,10 @@ describe("extractInputs", () => { payload: { action: "completed", check_run: { - details_url: "https://debc/123-456/_build/result/buildId=56789", + details_url: "https://dev.azure.com/abc/123-456/_build/results?buildId=56789", head_sha: "abc123", }, repository: { - name: "TestRepoName", owner: { login: "TestRepoOwnerLogin", }, @@ -454,16 +446,34 @@ describe("extractInputs", () => { }, }; - await expect( - extractInputs(github, context, createMockCore()), - ).rejects.toThrow("from check run details URL"); - - context.payload.check_run.details_url = - "https://dev.azure.com/abc/123-456/_build/results?buildId=56789"; - delete context.payload.repository.name; + await expect(extractInputs(github, context, createMockCore())).rejects.toThrow( + "from context payload", + ); + }); +}); - await expect( - extractInputs(github, context, createMockCore()), - ).rejects.toThrow("from context payload"); +it("check_run:completed", async () => { + const context = { + eventName: "check_suite", + payload: { + action: "completed", + check_suite: { + head_sha: "head_sha", + }, + repository: { + name: "TestRepoName", + owner: { + login: "TestRepoOwnerLogin", + }, + }, + }, + }; + + await expect(extractInputs(createMockGithub(), context, createMockCore())).resolves.toEqual({ + owner: "TestRepoOwnerLogin", + repo: "TestRepoName", + head_sha: "head_sha", + issue_number: NaN, + run_id: NaN, }); }); diff --git a/.github/workflows/test/core-logger.test.js b/.github/workflows/test/core-logger.test.js index 201afbf1411d..47c63bdae0f5 100644 --- 
a/.github/workflows/test/core-logger.test.js +++ b/.github/workflows/test/core-logger.test.js @@ -12,6 +12,15 @@ describe("CoreLogger", () => { expect(core.debug).toBeCalledWith("test debug"); }); + it("error", async () => { + const core = createMockCore(); + const logger = new CoreLogger(core); + + logger.error("test error"); + + expect(core.error).toBeCalledWith("test error"); + }); + it("info", async () => { const core = createMockCore(); const logger = new CoreLogger(core); diff --git a/.github/workflows/test/github.test.js b/.github/workflows/test/github.test.js new file mode 100644 index 000000000000..9dfcef9dbd0a --- /dev/null +++ b/.github/workflows/test/github.test.js @@ -0,0 +1,211 @@ +import { describe, expect, it, vi } from "vitest"; +import { getCheckRuns, getWorkflowRuns, writeToActionsSummary } from "../src/github.js"; +import { createMockContext, createMockCore, createMockGithub } from "./mocks.js"; + +const mockCore = createMockCore(); + +describe("getCheckRuns", () => { + it("returns matching check_run", async () => { + const githubMock = createMockGithub(); + githubMock.rest.checks.listForRef = vi.fn().mockResolvedValue({ + data: { + check_runs: [ + { + name: "checkRunName", + status: "completed", + conclusion: "success", + }, + ], + }, + }); + + const actual = await getCheckRuns( + githubMock, + createMockContext(), + createMockCore(), + "checkRunName", + "head_sha", + ); + + expect(actual).toEqual([ + expect.objectContaining({ + name: "checkRunName", + status: "completed", + conclusion: "success", + }), + ]); + }); + + it("returns null when no check matches", async () => { + const githubMock = createMockGithub(); + githubMock.rest.checks.listForRef = vi.fn().mockResolvedValue({ + data: { + check_runs: [], + }, + }); + + const actual = await getCheckRuns(githubMock, createMockContext(), "checkRunName", "head_sha"); + + expect(actual).toEqual([]); + }); + + it("throws when multiple checks match", async () => { + const githubMock = 
createMockGithub(); + const earlierDate = "2025-04-01T00:00:00Z"; + const laterDate = "2025-04-02T00:00:00Z"; + githubMock.rest.checks.listForRef = vi.fn().mockResolvedValue({ + data: { + check_runs: [ + { + name: "checkRunName", + status: "completed", + conclusion: "success", + completed_at: earlierDate, + }, + { + name: "checkRunName", + status: "completed", + conclusion: "success", + completed_at: laterDate, + }, + ], + }, + }); + + const actual = await getCheckRuns(githubMock, createMockContext(), "checkRunName", "head_sha"); + + expect(actual).toEqual([ + expect.objectContaining({ + name: "checkRunName", + status: "completed", + conclusion: "success", + completed_at: laterDate, + }), + expect.objectContaining({ + name: "checkRunName", + status: "completed", + conclusion: "success", + completed_at: earlierDate, + }), + ]); + }); +}); + +describe("getWorkflowRuns", () => { + it("returns matching workflow_run", async () => { + const githubMock = createMockGithub(); + githubMock.rest.actions.listWorkflowRunsForRepo = vi.fn().mockResolvedValue({ + data: { + workflow_runs: [ + { + name: "workflowName", + status: "completed", + conclusion: "success", + }, + ], + }, + }); + + const actual = await getWorkflowRuns( + githubMock, + createMockContext(), + "workflowName", + "head_sha", + ); + + expect(actual).toEqual([ + expect.objectContaining({ + name: "workflowName", + status: "completed", + conclusion: "success", + }), + ]); + }); + + it("returns null when no workflow matches", async () => { + const githubMock = createMockGithub(); + githubMock.rest.actions.listWorkflowRunsForRepo = vi.fn().mockResolvedValue({ + data: { + workflow_runs: [ + { + name: "otherWorkflowName", + }, + ], + }, + }); + + const actual = await getWorkflowRuns( + githubMock, + createMockContext(), + "workflowName", + "head_sha", + ); + + expect(actual).toEqual([]); + }); + + it("returns latest when multiple workflows match", async () => { + const githubMock = createMockGithub(); + const earlyDate 
= "2025-04-01T00:00:00Z"; + const laterDate = "2025-04-02T00:00:00Z"; + githubMock.rest.actions.listWorkflowRunsForRepo = vi.fn().mockResolvedValue({ + data: { + workflow_runs: [ + { + name: "workflowName", + status: "completed", + conclusion: "success", + updated_at: earlyDate, + }, + { + name: "workflowName", + status: "completed", + conclusion: "success", + updated_at: laterDate, + }, + ], + }, + }); + + const actual = await getWorkflowRuns( + githubMock, + createMockContext(), + "workflowName", + "head_sha", + ); + + expect(actual).toEqual([ + expect.objectContaining({ + updated_at: laterDate, + }), + expect.objectContaining({ + updated_at: earlyDate, + }), + ]); + }); +}); + +describe("writeToActionsSummary function", () => { + it("should add content to the summary and write it", async () => { + // Call function + const result = await writeToActionsSummary("Test content", mockCore); + + // Verify result + expect(result).undefined; + + // Verify summary methods were called + expect(mockCore.summary.addRaw).toHaveBeenCalledWith("Test content"); + expect(mockCore.summary.write).toHaveBeenCalled(); + }); + + it("should handle exception", async () => { + // Create a mock with a write method that throws an error + const mockCoreWithError = createMockCore(); + mockCoreWithError.summary.write.mockRejectedValue(new Error("Mock write error")); + + // Call function and validate it throws + await expect(writeToActionsSummary("Test content", mockCoreWithError)).rejects.toThrow( + "Failed to write to the GitHub Actions summary", + ); + }); +}); diff --git a/.github/workflows/test/mocks.js b/.github/workflows/test/mocks.js index ed845f6f8d5b..d6fdfafc66de 100644 --- a/.github/workflows/test/mocks.js +++ b/.github/workflows/test/mocks.js @@ -13,12 +13,9 @@ export function createMockGithub() { }, rest: { actions: { - listWorkflowRunArtifacts: vi - .fn() - .mockResolvedValue({ data: { artifacts: [] } }), - listWorkflowRunsForRepo: vi - .fn() - .mockResolvedValue({ data: { 
workflow_runs: [] } }), + listJobsForWorkflowRun: vi.fn().mockResolvedValue({ data: [] }), + listWorkflowRunArtifacts: vi.fn().mockResolvedValue({ data: { artifacts: [] } }), + listWorkflowRunsForRepo: vi.fn().mockResolvedValue({ data: { workflow_runs: [] } }), }, checks: { listForRef: vi.fn().mockResolvedValue({ data: { check_runs: [] } }), @@ -32,6 +29,7 @@ export function createMockGithub() { get: vi.fn(), }, repos: { + createCommitStatus: vi.fn(), listPullRequestsAssociatedWithCommit: vi.fn().mockResolvedValue({ data: [], }), @@ -48,12 +46,19 @@ export function createMockCore() { return { debug: vi.fn(console.debug), info: vi.fn(console.log), + notice: vi.fn(console.log), error: vi.fn(console.error), warning: vi.fn(console.warn), isDebug: vi.fn().mockReturnValue(true), - setOutput: vi.fn((name, value) => - console.log(`setOutput('${name}', '${value}')`), - ), + setOutput: vi.fn((name, value) => console.log(`setOutput('${name}', '${value}')`)), + setFailed: vi.fn((msg) => console.log(`setFailed('${msg}')`)), + summary: { + // eslint-disable-next-line no-unused-vars + addRaw: vi.fn(function (content) { + return this; // Return 'this' for method chaining + }), + write: vi.fn().mockResolvedValue(undefined), + }, }; } diff --git a/.github/workflows/test/retries.test.js b/.github/workflows/test/retries.test.js new file mode 100644 index 000000000000..ba7f2a8bd5c5 --- /dev/null +++ b/.github/workflows/test/retries.test.js @@ -0,0 +1,75 @@ +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import { retry, fetchWithRetry } from "../src/retries.js"; + +// Mock console.log to avoid cluttering test output +const originalConsoleLog = console.log; +beforeEach(() => { + console.log = vi.fn(); +}); +afterEach(() => { + console.log = originalConsoleLog; + vi.resetAllMocks(); + vi.useRealTimers(); +}); + +describe("retry function", () => { + it("should resolve immediately when function succeeds on first attempt", async () => { + const mockFn = 
vi.fn().mockResolvedValue("success"); + const result = await retry(mockFn); + + expect(result).toBe("success"); + expect(mockFn).toHaveBeenCalledTimes(1); + }); + + it("should retry when function fails and then succeed", async () => { + const mockFn = vi.fn().mockRejectedValueOnce(new Error("failure")).mockResolvedValue("success"); + + // Use fake timers + vi.useFakeTimers(); + + // Start the retry process with a specific initialDelayMs + const initialDelayMs = 100; + const promise = retry(mockFn, { initialDelayMs }); + + // First attempt fails immediately + await vi.advanceTimersByTimeAsync(initialDelayMs); + + // Get the result (third attempt should succeed) + const result = await promise; + expect(result).toBe("success"); + }); + + it("should throw error after maximum retries", async () => { + const mockError = new Error("persistent failure"); + const mockFn = vi.fn().mockRejectedValue(mockError); + const mockLogger = vi.fn(); + + await expect( + retry(mockFn, { + maxRetries: 1, + logger: mockLogger, + initialDelayMs: 10, // keep it fast + }), + ).rejects.toThrow("persistent failure"); + }); +}); + +describe("fetchWithRetry function", () => { + beforeEach(() => { + // Mock global fetch + global.fetch = vi.fn(); + }); + + it("should call fetch with provided url and options", async () => { + const mockResponse = { + ok: true, + json: () => Promise.resolve({ data: "test" }), + }; + global.fetch.mockResolvedValue(mockResponse); + + const url = "https://example.com/api"; + const options = { method: "POST", body: JSON.stringify({ key: "value" }) }; + const response = await fetchWithRetry(url, options); + expect(response).toBe(mockResponse); + }); +}); diff --git a/.github/workflows/test/sdk-breaking-change-labels.test.js b/.github/workflows/test/sdk-breaking-change-labels.test.js index c2c64e225bb4..244eb4dd6b1f 100644 --- a/.github/workflows/test/sdk-breaking-change-labels.test.js +++ b/.github/workflows/test/sdk-breaking-change-labels.test.js @@ -1,15 +1,8 @@ 
-import { describe, expect, it, vi, beforeEach } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { sdkLabels } from "../../shared/src/sdk-types.js"; import { LabelAction } from "../src/label.js"; -import { - createMockCore, - createMockGithub, - createMockContext, -} from "./mocks.js"; -import { - getLabelAndAction, - getLabelAndActionImpl, -} from "../src/sdk-breaking-change-labels.js"; -import { sdkLabels } from "../../src/sdk-types.js"; +import { getLabelAndAction, getLabelAndActionImpl } from "../src/sdk-breaking-change-labels.js"; +import { createMockContext, createMockCore, createMockGithub } from "./mocks.js"; // Mock dependencies vi.mock("../src/context.js", () => ({ @@ -33,8 +26,7 @@ describe("sdk-breaking-change-labels", () => { it("should extract inputs and call getLabelAndActionImpl", async () => { // Mock extracted inputs const mockInputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -62,18 +54,11 @@ describe("sdk-breaking-change-labels", () => { JSON.stringify({ labelAction: true, language, + prNumber: "123", }), ), }; - // Mock PR search results - mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ - data: { - total_count: 1, - items: [{ number: 123, html_url: "https://github.com/pr/123" }], - }, - }); - // Setup fetch to return different responses for each call global.fetch.mockImplementation((url) => { if (url.includes("artifacts?artifactName=")) { @@ -96,19 +81,11 @@ describe("sdk-breaking-change-labels", () => { labelAction: LabelAction.Add, issueNumber: 123, }); - - // Verify mocks were called correctly - expect(mockGithub.rest.search.issuesAndPullRequests).toHaveBeenCalledWith( - { - q: `sha:abc123 type:pr state:open`, - }, - ); }); it("should correctly set labelAction to Remove", async () => { // Setup inputs const inputs = { - ado_build_id: 
"12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -133,6 +110,7 @@ describe("sdk-breaking-change-labels", () => { JSON.stringify({ labelAction: false, language, + prNumber: "123", }), ), }; @@ -146,14 +124,6 @@ describe("sdk-breaking-change-labels", () => { } }); - // Mock PR search - mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ - data: { - total_count: 1, - items: [{ number: 123, html_url: "https://github.com/pr/123" }], - }, - }); - // Call function const result = await getLabelAndAction({ github: mockGithub, @@ -171,8 +141,7 @@ describe("sdk-breaking-change-labels", () => { it("should throw error with invalid inputs", async () => { // Setup inputs const inputs = { - ado_build_id: "", - ado_project_url: "https://dev.azure.com/project", + details_url: "", head_sha: "abc123", }; @@ -195,8 +164,7 @@ describe("sdk-breaking-change-labels", () => { it("should handle API failure", async () => { // Setup inputs const inputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -208,18 +176,9 @@ describe("sdk-breaking-change-labels", () => { text: vi.fn().mockResolvedValue("Artifact not found"), }); - // Mock PR search success - mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ - data: { - total_count: 1, - items: [{ number: 123, html_url: "https://github.com/pr/123" }], - }, - }); - // Call function const result = await getLabelAndActionImpl({ - ado_build_id: inputs.ado_build_id, - ado_project_url: inputs.ado_project_url, + details_url: inputs.details_url, head_sha: inputs.head_sha, github: mockGithub, core: mockCore, @@ -241,31 +200,55 @@ describe("sdk-breaking-change-labels", () => { it("should complete without op when artifact does not exist", async () => { // Setup 
inputs const inputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; - // Mock fetch failure - global.fetch.mockResolvedValue({ - ok: false, - status: 404, - statusText: "Not Found", - text: vi.fn().mockResolvedValue("Artifact not found"), - }); - - // Mock PR search success - mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ - data: { - total_count: 1, - items: [{ number: 123, html_url: "https://github.com/pr/123" }], - }, + // Mock fetch to handle the artifact URL with 404 error and fallback behavior + global.fetch.mockImplementation((url) => { + if (url.includes("artifacts?artifactName=spec-gen-sdk-artifact")) { + // Initial fetch for the specific artifact returns 404 + return Promise.resolve({ + ok: false, + status: 404, + statusText: "Not Found", + text: vi.fn().mockResolvedValue("Artifact not found"), + }); + } else if (url.includes("artifacts?api-version=7.0") && !url.includes("artifactName=")) { + // List artifacts API call (used by fetchFailedArtifact) + return Promise.resolve({ + ok: true, + json: vi.fn().mockResolvedValue({ + value: [ + { + name: "spec-gen-sdk-artifact-failed", + id: "12345", + resource: { + downloadUrl: "https://dev.azure.com/download-failed?format=zip", + }, + }, + ], + }), + }); + } else if (url.includes("artifactName=spec-gen-sdk-artifact-failed")) { + // Fetch for the failed artifact version returns 404 too + return Promise.resolve({ + ok: false, + status: 404, + statusText: "Not Found", + text: vi.fn().mockResolvedValue("Failed artifact not found either"), + }); + } + // Default response for other URLs + return Promise.resolve({ + ok: true, + text: vi.fn().mockResolvedValue("{}"), + }); }); // Call function const result = await getLabelAndActionImpl({ - ado_build_id: inputs.ado_build_id, - ado_project_url: inputs.ado_project_url, + details_url: inputs.details_url, head_sha: 
inputs.head_sha, github: mockGithub, core: mockCore, @@ -282,8 +265,7 @@ describe("sdk-breaking-change-labels", () => { it("should throw error if resource is empty from the artifact api response", async () => { // Setup inputs const inputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -303,8 +285,7 @@ describe("sdk-breaking-change-labels", () => { // Call function and expect it to throw await expect( getLabelAndActionImpl({ - ado_build_id: inputs.ado_build_id, - ado_project_url: inputs.ado_project_url, + details_url: inputs.details_url, head_sha: inputs.head_sha, github: mockGithub, core: mockCore, @@ -315,8 +296,7 @@ describe("sdk-breaking-change-labels", () => { it("should throw error if missing download url from the artifact api response", async () => { // Setup inputs const inputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -338,8 +318,7 @@ describe("sdk-breaking-change-labels", () => { // Call function and expect it to throw await expect( getLabelAndActionImpl({ - ado_build_id: inputs.ado_build_id, - ado_project_url: inputs.ado_project_url, + details_url: inputs.details_url, head_sha: inputs.head_sha, github: mockGithub, core: mockCore, @@ -350,8 +329,7 @@ describe("sdk-breaking-change-labels", () => { it("should throw error when fail to fetch artifact content", async () => { // Setup inputs const inputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -387,8 +365,7 @@ describe("sdk-breaking-change-labels", () => { // Call function and expect it to throw await expect( getLabelAndActionImpl({ - ado_build_id: inputs.ado_build_id, - ado_project_url: 
inputs.ado_project_url, + details_url: inputs.details_url, head_sha: inputs.head_sha, github: mockGithub, core: mockCore, @@ -399,8 +376,7 @@ describe("sdk-breaking-change-labels", () => { it("should handle exception during processing", async () => { // Setup inputs const inputs = { - ado_build_id: "12345", - ado_project_url: "https://dev.azure.com/project", + details_url: "https://dev.azure.com/project/_build/results?buildId=12345", head_sha: "abc123", }; @@ -409,24 +385,18 @@ describe("sdk-breaking-change-labels", () => { throw new Error("Network error"); }); - // Mock PR search success - mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ - data: { - total_count: 1, - items: [{ number: 123, html_url: "https://github.com/pr/123" }], - }, + // Start the async operation that will retry + const promise = getLabelAndActionImpl({ + details_url: inputs.details_url, + head_sha: inputs.head_sha, + github: mockGithub, + core: mockCore, + // Change default retry delay from 1000ms to 1ms to reduce test time + retryOptions: { initialDelayMs: 1 }, }); - // Call function and expect it to throw - await expect( - getLabelAndActionImpl({ - ado_build_id: inputs.ado_build_id, - ado_project_url: inputs.ado_project_url, - head_sha: inputs.head_sha, - github: mockGithub, - core: mockCore, - }), - ).rejects.toThrow(); - }); + // Now expect the promise to reject + await expect(promise).rejects.toThrow("Network error"); + }, 10000); }); }); diff --git a/.github/workflows/test/set-status.test.js b/.github/workflows/test/set-status.test.js new file mode 100644 index 000000000000..08136fd97dc3 --- /dev/null +++ b/.github/workflows/test/set-status.test.js @@ -0,0 +1,288 @@ +import { beforeEach, describe, expect, it } from "vitest"; +import { setStatusImpl } from "../src/set-status.js"; + +import { CheckConclusion, CheckStatus, CommitStatusState } from "../src/github.js"; +import { createMockCore, createMockGithub } from "./mocks.js"; + +describe("setStatusImpl", () 
=> { + let core; + let github; + + beforeEach(() => { + core = createMockCore(); + github = createMockGithub(); + }); + + it("throws if inputs null", async () => { + await expect(setStatusImpl({})).rejects.toThrow(); + }); + + it("sets success if approved by label", async () => { + github.rest.issues.listLabelsOnIssue.mockResolvedValue({ + data: [{ name: "test" }, { name: "Approved-Avocado" }], + }); + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger Avocado - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger Avocado", + overridingLabel: "Approved-Avocado", + }), + ).resolves.toBeUndefined(); + + expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", + repo: "test-repo", + sha: "test-head-sha", + state: CommitStatusState.SUCCESS, + context: "[TEST-IGNORE] Swagger Avocado", + description: "Found label 'Approved-Avocado'", + target_url: "https://test.com/set_status_url", + }); + }); + + it("sets success with multiple comma-separated labels - first label matches", async () => { + github.rest.issues.listLabelsOnIssue.mockResolvedValue({ + data: [{ name: "test" }, { name: "BreakingChange-Approved-Benign" }], + }); + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger BreakingChange", + overridingLabel: + "BreakingChange-Approved-Benign,BreakingChange-Approved-BugFix,BreakingChange-Approved-UserImpact", + }), + ).resolves.toBeUndefined(); + + expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", + repo: "test-repo", + sha: "test-head-sha", + state: 
CommitStatusState.SUCCESS, + context: "[TEST-IGNORE] Swagger BreakingChange", + description: "Found label 'BreakingChange-Approved-Benign'", + target_url: "https://test.com/set_status_url", + }); + }); + + it("handles comma-separated labels with whitespace", async () => { + github.rest.issues.listLabelsOnIssue.mockResolvedValue({ + data: [{ name: "test" }, { name: "BreakingChange-Approved-UserImpact" }], + }); + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger BreakingChange", + overridingLabel: + "BreakingChange-Approved-Benign, BreakingChange-Approved-BugFix , BreakingChange-Approved-UserImpact", + }), + ).resolves.toBeUndefined(); + + expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", + repo: "test-repo", + sha: "test-head-sha", + state: CommitStatusState.SUCCESS, + context: "[TEST-IGNORE] Swagger BreakingChange", + description: "Found label 'BreakingChange-Approved-UserImpact'", + target_url: "https://test.com/set_status_url", + }); + }); + + it("handles comma-separated labels with empty entries", async () => { + github.rest.issues.listLabelsOnIssue.mockResolvedValue({ + data: [{ name: "test" }, { name: "BreakingChange-Approved-Security" }], + }); + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger BreakingChange", + overridingLabel: "BreakingChange-Approved-Benign,,BreakingChange-Approved-Security,", + }), + ).resolves.toBeUndefined(); + + 
expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", + repo: "test-repo", + sha: "test-head-sha", + state: CommitStatusState.SUCCESS, + context: "[TEST-IGNORE] Swagger BreakingChange", + description: "Found label 'BreakingChange-Approved-Security'", + target_url: "https://test.com/set_status_url", + }); + }); + + it("does not set success when none of the multiple labels match", async () => { + github.rest.issues.listLabelsOnIssue.mockResolvedValue({ + data: [{ name: "test" }, { name: "SomeOtherLabel" }], + }); + + github.rest.actions.listWorkflowRunsForRepo.mockResolvedValue({ + data: [], + }); + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger BreakingChange", + overridingLabel: + "BreakingChange-Approved-Benign,BreakingChange-Approved-BugFix,BreakingChange-Approved-UserImpact", + }), + ).resolves.toBeUndefined(); + + expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", + repo: "test-repo", + sha: "test-head-sha", + state: CommitStatusState.PENDING, + context: "[TEST-IGNORE] Swagger BreakingChange", + target_url: "https://test.com/set_status_url", + }); + }); + + it("handles empty overriding label", async () => { + github.rest.issues.listLabelsOnIssue.mockResolvedValue({ + data: [{ name: "test" }], + }); + + github.rest.actions.listWorkflowRunsForRepo.mockResolvedValue({ + data: [], + }); + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger BreakingChange - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger BreakingChange", + 
overridingLabel: "", + }), + ).resolves.toBeUndefined(); + + expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", + repo: "test-repo", + sha: "test-head-sha", + state: CommitStatusState.PENDING, + context: "[TEST-IGNORE] Swagger BreakingChange", + target_url: "https://test.com/set_status_url", + }); + }); + + it.each([ + [ + CheckStatus.COMPLETED, + CheckConclusion.SUCCESS, + CommitStatusState.SUCCESS, + "https://test.com/workflow_run_html_url", + ], + [ + CheckStatus.COMPLETED, + CheckConclusion.FAILURE, + CommitStatusState.FAILURE, + "https://test.com/job_html_url?pr=123", + ], + [ + CheckStatus.IN_PROGRESS, + null, + CommitStatusState.PENDING, + "https://test.com/workflow_run_html_url", + ], + [null, null, CommitStatusState.PENDING, "https://test.com/set_status_url"], + ])("(%s, %s, %s) => %s", async (checkStatus, checkConclusion, commitStatusState, targetUrl) => { + if (checkStatus) { + github.rest.actions.listWorkflowRunsForRepo.mockResolvedValue({ + data: [ + { + name: "[TEST-IGNORE] Swagger Avocado - Analyze Code", + status: checkStatus, + conclusion: checkConclusion, + updated_at: "2025-01-01", + html_url: "https://test.com/workflow_run_html_url", + }, + ], + }); + + if ( + checkConclusion === CheckConclusion.SUCCESS || + checkConclusion === CheckConclusion.FAILURE + ) { + github.rest.actions.listJobsForWorkflowRun.mockResolvedValue({ + data: [ + { + conclusion: checkConclusion, + html_url: "https://test.com/job_html_url", + }, + ], + }); + } + } + + await expect( + setStatusImpl({ + owner: "test-owner", + repo: "test-repo", + head_sha: "test-head-sha", + issue_number: 123, + target_url: "https://test.com/set_status_url", + github, + core, + monitoredWorkflowName: "[TEST-IGNORE] Swagger Avocado - Analyze Code", + requiredStatusName: "[TEST-IGNORE] Swagger Avocado", + overridingLabel: "Approved-Avocado", + }), + ).resolves.toBeUndefined(); + + expect(github.rest.repos.createCommitStatus).toBeCalledWith({ + owner: "test-owner", 
+ repo: "test-repo", + sha: "test-head-sha", + state: commitStatusState, + context: "[TEST-IGNORE] Swagger Avocado", + target_url: targetUrl, + }); + }); +}); diff --git a/.github/workflows/test/spec-gen-sdk-status.test.js b/.github/workflows/test/spec-gen-sdk-status.test.js new file mode 100644 index 000000000000..9c88e28cfc3b --- /dev/null +++ b/.github/workflows/test/spec-gen-sdk-status.test.js @@ -0,0 +1,335 @@ +/* eslint-disable no-unused-vars */ +// @ts-check +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { setSpecGenSdkStatusImpl } from "../src/spec-gen-sdk-status.js"; +import * as artifacts from "../src/artifacts.js"; +import * as github from "../src/github.js"; +import { createMockGithub, createMockCore } from "./mocks.js"; +import fs from "fs"; + +describe("spec-gen-sdk-status", () => { + let mockGithub; + let mockCore; + let getAzurePipelineArtifactMock; + let writeToActionsSummaryMock; + let appendFileSyncMock; + + beforeEach(() => { + // Setup mocks using the helper functions + mockGithub = createMockGithub(); + mockCore = createMockCore(); + + // Setup specific mocks + getAzurePipelineArtifactMock = vi + .spyOn(artifacts, "getAzurePipelineArtifact") + .mockImplementation(async ({ ado_build_id, ado_project_url, artifactName }) => { + return { + artifactData: JSON.stringify({ + language: "test-language", + result: "succeeded", + isSpecGenSdkCheckRequired: true, + }), + }; + }); + + writeToActionsSummaryMock = vi + .spyOn(github, "writeToActionsSummary") + .mockImplementation(async (content, core) => { + // Implementation that just returns + return; + }); + + appendFileSyncMock = vi.spyOn(fs, "appendFileSync").mockImplementation(vi.fn()); + + // Reset mock call counts + vi.clearAllMocks(); + + // Mock environment variable + process.env.GITHUB_STEP_SUMMARY = "/tmp/test-summary.md"; + }); + + afterEach(() => { + // Restore mocks + getAzurePipelineArtifactMock.mockRestore(); + writeToActionsSummaryMock.mockRestore(); + 
appendFileSyncMock.mockRestore(); + }); + + it("should set pending status when checks are not completed", async () => { + // Setup GitHub API to return incomplete checks + mockGithub.rest.checks.listForRef.mockResolvedValue({ + data: { + check_runs: [ + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "in_progress", + conclusion: null, + }, + ], + }, + }); + + // Call the function + await setSpecGenSdkStatusImpl({ + owner: "testOwner", + repo: "testRepo", + head_sha: "testSha", + target_url: "https://example.com", + github: mockGithub, + core: mockCore, + }); + + // Verify the right status was set + expect(mockGithub.rest.repos.createCommitStatus).toHaveBeenCalledWith( + expect.objectContaining({ + owner: "testOwner", + repo: "testRepo", + sha: "testSha", + state: "pending", + }), + ); + }); + + it("should set success status when all checks are completed successfully", async () => { + // Mock check runs with completed status + mockGithub.rest.checks.listForRef.mockResolvedValue({ + data: { + check_runs: [ + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "success", + details_url: "https://dev.azure.com/project/_build/results?buildId=123", + }, + ], + }, + }); + + // Mock getAzurePipelineArtifact to return success data + getAzurePipelineArtifactMock.mockResolvedValue({ + artifactData: JSON.stringify({ + language: "test-language", + result: "succeeded", + isSpecGenSdkCheckRequired: true, + }), + }); + + // Call the function + await setSpecGenSdkStatusImpl({ + owner: "testOwner", + repo: "testRepo", + head_sha: "testSha", + target_url: "https://example.com", + github: mockGithub, + core: mockCore, + }); + + // Verify the right status was set + expect(mockGithub.rest.repos.createCommitStatus).toHaveBeenCalledWith( + expect.objectContaining({ + owner: "testOwner", + repo: "testRepo", + sha: "testSha", + state: "success", + description: "SDK Validation CI checks succeeded", + }), + ); + }); + + 
it("should set failure status when required checks fail", async () => { + // Mock check runs with completed status but failed required checks + mockGithub.rest.checks.listForRef.mockResolvedValue({ + data: { + check_runs: [ + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "success", + details_url: "https://dev.azure.com/project/_build/results?buildId=123", + }, + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "failure", + details_url: "https://dev.azure.com/project/_build/results?buildId=456", + }, + ], + }, + }); + + // Mock getAzurePipelineArtifact to return mixed results + getAzurePipelineArtifactMock.mockImplementation(async ({ ado_build_id }) => { + if (ado_build_id === "123") { + return { + artifactData: JSON.stringify({ + language: "test-language-1", + result: "succeeded", + isSpecGenSdkCheckRequired: true, + }), + }; + } else { + return { + artifactData: JSON.stringify({ + language: "test-language-2", + result: "failed", + isSpecGenSdkCheckRequired: true, + }), + }; + } + }); + + // Call the function + await setSpecGenSdkStatusImpl({ + owner: "testOwner", + repo: "testRepo", + head_sha: "testSha", + target_url: "https://example.com", + github: mockGithub, + core: mockCore, + }); + + // Verify the right status was set + expect(mockGithub.rest.repos.createCommitStatus).toHaveBeenCalledWith( + expect.objectContaining({ + owner: "testOwner", + repo: "testRepo", + sha: "testSha", + state: "failure", + description: expect.stringContaining("failed for"), + }), + ); + }); + + it("should write summary to GitHub Actions summary", async () => { + // Mock check runs + mockGithub.rest.checks.listForRef.mockResolvedValue({ + data: { + check_runs: [ + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "success", + details_url: "https://dev.azure.com/project/_build/results?buildId=123", + }, + ], + }, + }); + + // 
Call the function + await setSpecGenSdkStatusImpl({ + owner: "testOwner", + repo: "testRepo", + head_sha: "testSha", + target_url: "https://example.com", + github: mockGithub, + core: mockCore, + }); + + // Verify summary was written + expect(writeToActionsSummaryMock).toHaveBeenCalled(); + expect(writeToActionsSummaryMock.mock.calls[0][0]).toContain("SDK Validation CI Checks Result"); + }); + + it("should handle artifact download failures", async () => { + // Mock check runs + mockGithub.rest.checks.listForRef.mockResolvedValue({ + data: { + check_runs: [ + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "success", + details_url: "https://dev.azure.com/project/_build/results?buildId=123", + }, + ], + }, + }); + + // Mock artifact download failure + getAzurePipelineArtifactMock.mockResolvedValue({ artifactData: "" }); + + // Expect the function to throw an error + await expect( + setSpecGenSdkStatusImpl({ + owner: "testOwner", + repo: "testRepo", + head_sha: "testSha", + target_url: "https://example.com", + github: mockGithub, + core: mockCore, + }), + ).rejects.toThrow("Artifact 'spec-gen-sdk-artifact' not found"); + }); + + it("should handle non-required checks that fail", async () => { + // Mock check runs with completed status but failed non-required checks + mockGithub.rest.checks.listForRef.mockResolvedValue({ + data: { + check_runs: [ + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "success", + details_url: "https://dev.azure.com/project/_build/results?buildId=123", + }, + { + app: { name: "Azure Pipelines" }, + name: "SDK Validation", + status: "completed", + conclusion: "failure", + details_url: "https://dev.azure.com/project/_build/results?buildId=456", + }, + ], + }, + }); + + // Mock getAzurePipelineArtifact to return mixed results + getAzurePipelineArtifactMock.mockImplementation(async ({ ado_build_id }) => { + if (ado_build_id === "123") { + 
return { + artifactData: JSON.stringify({ + language: "test-language-1", + result: "succeeded", + isSpecGenSdkCheckRequired: true, + }), + }; + } else { + return { + artifactData: JSON.stringify({ + language: "test-language-2", + result: "failed", + isSpecGenSdkCheckRequired: false, // Not required + }), + }; + } + }); + + // Call the function + await setSpecGenSdkStatusImpl({ + owner: "testOwner", + repo: "testRepo", + head_sha: "testSha", + target_url: "https://example.com", + github: mockGithub, + core: mockCore, + }); + + // Verify the right status was set (success since only non-required failed) + expect(mockGithub.rest.repos.createCommitStatus).toHaveBeenCalledWith( + expect.objectContaining({ + owner: "testOwner", + repo: "testRepo", + sha: "testSha", + state: "success", + description: "SDK Validation CI checks succeeded", + }), + ); + }); +}); diff --git a/.github/workflows/test/update-labels.test.js b/.github/workflows/test/update-labels.test.js index 506315f6d173..196c65cfaf13 100644 --- a/.github/workflows/test/update-labels.test.js +++ b/.github/workflows/test/update-labels.test.js @@ -1,11 +1,7 @@ import { describe, expect, it } from "vitest"; import { PER_PAGE_MAX } from "../src/github.js"; import updateLabels, { updateLabelsImpl } from "../src/update-labels.js"; -import { - createMockCore, - createMockGithub, - createMockRequestError, -} from "./mocks.js"; +import { createMockCore, createMockGithub, createMockRequestError } from "./mocks.js"; describe("updateLabels", () => { it("loads inputs from env", async () => { @@ -311,47 +307,42 @@ describe("updateLabelsImpl", () => { expect(github.rest.issues.removeLabel).toBeCalledTimes(0); }); - it.each([404, 500, 501])( - "handles error removing label (%s)", - async (status) => { - const github = createMockGithub(); - github.rest.actions.listWorkflowRunArtifacts.mockResolvedValue({ - data: { - artifacts: [{ name: "label-foo=false" }], - }, - }); - github.rest.issues.removeLabel.mockRejectedValue( - 
createMockRequestError(status), - ); + it.each([404, 500, 501])("handles error removing label (%s)", async (status) => { + const github = createMockGithub(); + github.rest.actions.listWorkflowRunArtifacts.mockResolvedValue({ + data: { + artifacts: [{ name: "label-foo=false" }], + }, + }); + github.rest.issues.removeLabel.mockRejectedValue(createMockRequestError(status)); - const updateLabelsImplPromise = updateLabelsImpl({ - owner: "owner", - repo: "repo", - issue_number: 123, - run_id: 456, - github: github, - core: createMockCore(), - }); + const updateLabelsImplPromise = updateLabelsImpl({ + owner: "owner", + repo: "repo", + issue_number: 123, + run_id: 456, + github: github, + core: createMockCore(), + }); - if (status == 404) { - await expect(updateLabelsImplPromise).resolves.toBeUndefined(); - } else { - await expect(updateLabelsImplPromise).rejects.toThrow(); - } + if (status == 404) { + await expect(updateLabelsImplPromise).resolves.toBeUndefined(); + } else { + await expect(updateLabelsImplPromise).rejects.toThrow(); + } - expect(github.rest.actions.listWorkflowRunArtifacts).toBeCalledWith({ - owner: "owner", - repo: "repo", - run_id: 456, - per_page: PER_PAGE_MAX, - }); - expect(github.rest.issues.addLabels).toBeCalledTimes(0); - expect(github.rest.issues.removeLabel).toBeCalledWith({ - owner: "owner", - repo: "repo", - issue_number: 123, - name: "foo", - }); - }, - ); + expect(github.rest.actions.listWorkflowRunArtifacts).toBeCalledWith({ + owner: "owner", + repo: "repo", + run_id: 456, + per_page: PER_PAGE_MAX, + }); + expect(github.rest.issues.addLabels).toBeCalledTimes(0); + expect(github.rest.issues.removeLabel).toBeCalledWith({ + owner: "owner", + repo: "repo", + issue_number: 123, + name: "foo", + }); + }); }); diff --git a/.github/workflows/test/verify-run-status.test.js b/.github/workflows/test/verify-run-status.test.js new file mode 100644 index 000000000000..28da7ef61cc7 --- /dev/null +++ b/.github/workflows/test/verify-run-status.test.js @@ 
-0,0 +1,363 @@ +import { describe, expect, it, vi, beforeEach } from "vitest"; +import { verifyRunStatusImpl } from "../src/verify-run-status.js"; +import { createMockCore, createMockGithub } from "./mocks.js"; + +vi.mock("../src/context.js", () => { + return { + extractInputs: vi.fn().mockResolvedValue({ + head_sha: "head_sha", + }), + }; +}); + +vi.mock("../src/github.js", () => { + return { + getCheckRuns: vi.fn().mockResolvedValue([]), + getWorkflowRuns: vi.fn().mockResolvedValue([]), + getCommitStatuses: vi.fn().mockResolvedValue([]), + }; +}); + +describe("verifyRunStatusImpl", () => { + // Reset mock call history before each test + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("verifies status when check_run event fires", async () => { + const github = createMockGithub(); + const { getWorkflowRuns } = await import("../src/github.js"); + getWorkflowRuns.mockResolvedValue([ + { + name: "workflowName", + status: "completed", + conclusion: "success", + }, + ]); + const context = { + eventName: "check_run", + payload: { + check_run: { + name: "checkRunName", + conclusion: "success", + }, + }, + }; + + const core = createMockCore(); + + vi.stubEnv("CHECK_RUN_NAME", "checkRunName"); + vi.stubEnv("WORKFLOW_NAME", "workflowName"); + await verifyRunStatusImpl({ + github, + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + + expect(core.setFailed).not.toHaveBeenCalled(); + expect(core.notice).toHaveBeenCalledWith( + "Conclusions match for check run checkRunName and workflow run workflowName", + ); + }); + + it("verifies status when workflow_run event fires", async () => { + const github = createMockGithub(); + github.rest.checks.listForRef = vi.fn().mockResolvedValue({ + data: { + check_runs: [ + { + name: "checkRunName", + status: "completed", + conclusion: "success", + }, + ], + }, + }); + + const context = { + eventName: "workflow_run", + payload: { + workflow_run: { + name: "workflowName", + conclusion: "success", + }, 
+ }, + }; + + const core = createMockCore(); + + vi.stubEnv("CHECK_RUN_NAME", "checkRunName"); + vi.stubEnv("WORKFLOW_NAME", "workflowName"); + await verifyRunStatusImpl({ + github, + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + + expect(core.setFailed).not.toHaveBeenCalled(); + }); + + it("returns early during workflow_run event when no matching check_run is found", async () => { + const github = createMockGithub(); + github.rest.checks.listForRef = vi.fn().mockResolvedValue({ + data: { + check_runs: [], + }, + }); + + const context = { + eventName: "workflow_run", + payload: { + workflow_run: { + name: "workflowName", + conclusion: "success", + }, + }, + }; + const core = createMockCore(); + await verifyRunStatusImpl({ + github, + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + expect(core.setFailed).not.toHaveBeenCalled(); + expect(core.notice).toHaveBeenCalledWith( + "No completed check run with name: checkRunName. Not enough information to judge success or failure. Ending with success status.", + ); + }); + + it("returns early during check_run event when no matching workflow_run is found", async () => { + const { getWorkflowRuns } = await import("../src/github.js"); + getWorkflowRuns.mockResolvedValue([]); + + const context = { + eventName: "check_run", + payload: { + check_run: { + name: "checkRunName", + conclusion: "success", + }, + }, + }; + const core = createMockCore(); + await verifyRunStatusImpl({ + github: createMockGithub(), + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + expect(core.setFailed).not.toHaveBeenCalled(); + expect(core.notice).toHaveBeenCalledWith( + "No completed workflow run with name: workflowName. Not enough information to judge success or failure. 
Ending with success status.", + ); + }); + + it("returns early if event is check_run but does not match input name", async () => { + const github = createMockGithub(); + const context = { + eventName: "check_run", + payload: { + check_run: { + name: "checkRunName", + conclusion: "success", + }, + }, + }; + const core = createMockCore(); + await verifyRunStatusImpl({ + github, + context, + core, + checkRunName: "otherCheckRunName", + workflowName: "workflowName", + }); + expect(core.setFailed).toHaveBeenCalledWith( + "Check run name (checkRunName) does not match input: otherCheckRunName. Ensure job is filtering by github.event.check_run.name.", + ); + }); + + it("throws if check_run conclusion does not match workflow_run conclusion", async () => { + const { getWorkflowRuns } = await import("../src/github.js"); + getWorkflowRuns.mockResolvedValue([ + { + name: "workflowName", + status: "completed", + conclusion: "failure", + }, + ]); + + const context = { + eventName: "check_run", + payload: { + check_run: { + name: "checkRunName", + conclusion: "success", + }, + }, + }; + const core = createMockCore(); + await verifyRunStatusImpl({ + github: createMockGithub(), + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + expect(core.setFailed).toHaveBeenCalledWith( + "Check run conclusion (success) does not match workflow run conclusion (failure)", + ); + }); + + it("throws when in check_suite event but no check_run with name is found", async () => { + const github = createMockGithub(); + github.rest.checks.listForRef = vi.fn().mockResolvedValue({ + data: { + check_runs: [], + }, + }); + + const context = { + eventName: "check_suite", + payload: { + check_suite: { + app: { + name: "checkRunName", + }, + }, + }, + }; + const core = createMockCore(); + await verifyRunStatusImpl({ + github, + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + expect(core.setFailed).toHaveBeenCalledWith( + "Could not 
locate check run checkRunName in check suite checkRunName. Ensure job is filtering by github.event.check_suite.app.name.", + ); + }); + + it("fetches commit status from API when not status event", async () => { + const { getCheckRuns, getCommitStatuses } = await import("../src/github.js"); + getCheckRuns.mockResolvedValue([ + { + name: "checkRunName", + conclusion: "success", + html_url: "https://example.com/check", + }, + ]); + getCommitStatuses.mockResolvedValue([ + { + context: "commitStatusName", + state: "success", + target_url: "https://example.com/status", + }, + ]); + + const context = { + eventName: "workflow_run", + payload: { + workflow_run: { + name: "workflowName", + conclusion: "success", + }, + }, + }; + + const core = createMockCore(); + + await verifyRunStatusImpl({ + github: createMockGithub(), + context, + core, + checkRunName: "checkRunName", + commitStatusName: "commitStatusName", + workflowName: "workflowName", + }); + + expect(core.setFailed).not.toHaveBeenCalled(); + expect(core.notice).toHaveBeenCalledWith( + "Conclusions match for check run checkRunName and commit status commitStatusName", + ); + }); + + it("handles API error when fetching commit status", async () => { + const { getCheckRuns, getCommitStatuses } = await import("../src/github.js"); + getCheckRuns.mockResolvedValue([ + { + name: "checkRunName", + conclusion: "success", + html_url: "https://example.com/check", + }, + ]); + getCommitStatuses.mockRejectedValue(new Error("API Error")); + + const context = { + eventName: "workflow_run", + payload: { + workflow_run: { + name: "workflowName", + conclusion: "success", + }, + }, + }; + + const core = createMockCore(); + + await verifyRunStatusImpl({ + github: createMockGithub(), + context, + core, + checkRunName: "checkRunName", + commitStatusName: "commitStatusName", + workflowName: "workflowName", + }); + + expect(core.setFailed).toHaveBeenCalledWith("Failed to fetch commit status: API Error"); + }); + + it("verifies neutral check 
run matches success workflow run", async () => { + const { getCheckRuns } = await import("../src/github.js"); + getCheckRuns.mockResolvedValue([ + { + name: "checkRunName", + conclusion: "neutral", + html_url: "https://example.com/check", + }, + ]); + + const context = { + eventName: "workflow_run", + payload: { + workflow_run: { + name: "workflowName", + conclusion: "success", + }, + }, + }; + + const core = createMockCore(); + + await verifyRunStatusImpl({ + github: createMockGithub(), + context, + core, + checkRunName: "checkRunName", + workflowName: "workflowName", + }); + + expect(core.setFailed).not.toHaveBeenCalled(); + expect(core.notice).toHaveBeenCalledWith( + "Conclusions match for check run checkRunName and workflow run workflowName", + ); + }); +}); diff --git a/.github/workflows/typespec-migration-validation.yaml b/.github/workflows/typespec-migration-validation.yaml new file mode 100644 index 000000000000..2536989fe082 --- /dev/null +++ b/.github/workflows/typespec-migration-validation.yaml @@ -0,0 +1,27 @@ +name: TypeSpec Migration Validation + +on: + pull_request: + types: [labeled, unlabeled, opened, reopened, synchronize] + +permissions: + contents: read + +jobs: + typespec-migration-validation: + name: TypeSpec Migration Validation + if: contains(github.event.pull_request.labels.*.name, 'typespec-conversion-w1') || contains(github.event.pull_request.labels.*.name, 'typespec-conversion-w2') + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Setup Node and install deps + uses: ./.github/actions/setup-node-install-deps + + - name: Run TypeSpec Migration Validation + run: | + ./eng/tools/typespec-migration-validation/scripts/download-main.ps1 -Verbose -callValidation $true + shell: pwsh diff --git a/.github/workflows/typespec-validation-all.yaml b/.github/workflows/typespec-validation-all.yaml index 02fea60a3a5c..425cdf110e9d 100644 --- a/.github/workflows/typespec-validation-all.yaml +++ 
b/.github/workflows/typespec-validation-all.yaml @@ -19,12 +19,14 @@ on: - package.json - tsconfig.json - eng/** + - "!eng/common/**" - specification/suppressions.yaml - specification/common-types/** # Workflow and workflow dependencies - - .github/workflows/typespec-validation-all.yaml - .github/actions/setup-node-npm-ci/** + - .github/shared/** + - .github/workflows/typespec-validation-all.yaml schedule: # Run 4x/day diff --git a/.github/workflows/typespec-validation-test.yaml b/.github/workflows/typespec-validation-test.yaml index 7d565bcf4b64..aa157c887dee 100644 --- a/.github/workflows/typespec-validation-test.yaml +++ b/.github/workflows/typespec-validation-test.yaml @@ -10,6 +10,7 @@ on: - package-lock.json - package.json - tsconfig.json + - .github/shared - .github/workflows/_reusable-eng-tools-test.yaml - .github/workflows/typespec-validation-test.yaml - eng/tools/package.json diff --git a/.github/workflows/typespec-validation.yaml b/.github/workflows/typespec-validation.yaml index 813029a8e8ee..7a38bb20870d 100644 --- a/.github/workflows/typespec-validation.yaml +++ b/.github/workflows/typespec-validation.yaml @@ -24,5 +24,8 @@ jobs: # step as failed. 
$ErrorActionPreference = 'Continue' - ./eng/scripts/TypeSpec-Validation.ps1 -GitClean -Verbose + # Only "main" and "RPSaaSMaster" should validate all specs if core files change + $ignoreCoreFiles = -not (@('main', 'RPSaaSMaster') -contains $Env:GITHUB_BASE_REF) + + ./eng/scripts/TypeSpec-Validation.ps1 -GitClean -Verbose -IgnoreCoreFiles:$ignoreCoreFiles shell: pwsh diff --git a/.github/workflows/update-labels.yaml b/.github/workflows/update-labels.yaml index 81d386f39c44..beaa0c4aa9db 100644 --- a/.github/workflows/update-labels.yaml +++ b/.github/workflows/update-labels.yaml @@ -7,12 +7,7 @@ on: # If an upstream workflow if completed, get only the artifacts from that workflow, and update labels workflow_run: workflows: - [ - "ARM Auto SignOff", - "SDK Breaking Change Labels", - "SDK Suppressions", - "TypeSpec Requirement", - ] + ["ARM Auto SignOff", "SDK Breaking Change Labels", "SDK Suppressions", "TypeSpec Requirement"] types: [completed] workflow_dispatch: inputs: diff --git a/.github/workflows/watch-avocado.yaml b/.github/workflows/watch-avocado.yaml new file mode 100644 index 000000000000..c083b034d3ae --- /dev/null +++ b/.github/workflows/watch-avocado.yaml @@ -0,0 +1,25 @@ +# Use ~ to sort the workflow to the bottom of the list +name: "~Watch - Avocado" + +on: + # check_suite is preferred over check_run to avoid triggering on all check + # runs. In some cases, check_run must be used in testing environments. 
+ check_suite: + types: completed + + workflow_run: + types: completed + workflows: + - "\\[TEST-IGNORE\\] Swagger Avocado - Set Status" + +permissions: + checks: read + contents: read + +jobs: + Avocado: + name: Watch Avocado + uses: ./.github/workflows/_reusable-verify-run-status.yaml + with: + check_run_name: "Swagger Avocado" + commit_status_name: "[TEST-IGNORE] Swagger Avocado" diff --git a/.github/workflows/watch-breakingchange.yaml b/.github/workflows/watch-breakingchange.yaml new file mode 100644 index 000000000000..9c14cbe57cb2 --- /dev/null +++ b/.github/workflows/watch-breakingchange.yaml @@ -0,0 +1,25 @@ +# Use ~ to sort the workflow to the bottom of the list +name: "~Watch - BreakingChange" + +on: + # check_suite is preferred over check_run to avoid triggering on all check + # runs. In some cases, check_run must be used in testing environments. + check_suite: + types: completed + + workflow_run: + types: completed + workflows: + - "\\[TEST-IGNORE\\] Swagger BreakingChange - Set Status" + +permissions: + checks: read + contents: read + +jobs: + BreakingChange: + name: Watch BreakingChange + uses: ./.github/workflows/_reusable-verify-run-status.yaml + with: + check_run_name: "Swagger BreakingChange" + commit_status_name: "[TEST-IGNORE] Swagger BreakingChange" diff --git a/.github/workflows/watch-modelvalidation.yaml b/.github/workflows/watch-modelvalidation.yaml new file mode 100644 index 000000000000..a5d13ce232c6 --- /dev/null +++ b/.github/workflows/watch-modelvalidation.yaml @@ -0,0 +1,25 @@ +# Use ~ to sort the workflow to the bottom of the list +name: "~Watch - Swagger ModelValidation" + +on: + # check_suite is preferred over check_run to avoid triggering on all check + # runs. In some cases, check_run must be used in testing environments. 
+ check_suite: + types: completed + + workflow_run: + types: completed + workflows: + - "\\[TEST-IGNORE\\] Swagger ModelValidation" + +permissions: + checks: read + contents: read + +jobs: + ModelValidationWatch: + name: Watch ModelValidation + uses: ./.github/workflows/_reusable-verify-run-status.yaml + with: + check_run_name: "Swagger ModelValidation" + workflow_name: "[TEST-IGNORE] Swagger ModelValidation" diff --git a/.github/workflows/watch-semanticvalidation.yaml b/.github/workflows/watch-semanticvalidation.yaml new file mode 100644 index 000000000000..cba8cce1ead1 --- /dev/null +++ b/.github/workflows/watch-semanticvalidation.yaml @@ -0,0 +1,25 @@ +# Use ~ to sort the workflow to the bottom of the list +name: "~Watch - Swagger SemanticValidation" + +on: + # check_suite is preferred over check_run to avoid triggering on all check + # runs. In some cases, check_run must be used in testing environments. + check_suite: + types: completed + + workflow_run: + types: completed + workflows: + - "\\[TEST-IGNORE\\] Swagger SemanticValidation" + +permissions: + checks: read + contents: read + +jobs: + SemanticValidationWatch: + name: Watch SemanticValidation + uses: ./.github/workflows/_reusable-verify-run-status.yaml + with: + check_run_name: "Swagger SemanticValidation" + workflow_name: "[TEST-IGNORE] Swagger SemanticValidation" diff --git a/.gitignore b/.gitignore index 067e18a1d128..23e8f5dfc864 100644 --- a/.gitignore +++ b/.gitignore @@ -111,6 +111,7 @@ warnings.txt !.vscode/tasks.json !.vscode/launch.json !.vscode/extensions.json +!.vscode/mcp.json # API Test outputs .apitest @@ -136,6 +137,7 @@ eng/tools/**/dist **/package-lock.json !/package-lock.json !/.github/package-lock.json +!/.github/shared/package-lock.json # No Armstrong outputs should be commited except the tf files. 
**/terraform/**/*.json diff --git a/.vscode/launch.json b/.vscode/launch.json index fe6da0a7d616..8815f85ff2fd 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -24,4 +24,4 @@ "program": "${file}" } ] -} \ No newline at end of file +} diff --git a/.vscode/mcp.json b/.vscode/mcp.json new file mode 100644 index 000000000000..64321b5cbc14 --- /dev/null +++ b/.vscode/mcp.json @@ -0,0 +1,9 @@ +{ + "servers": { + "azure-sdk-mcp": { + "type": "stdio", + "command": "pwsh", + "args": ["${workspaceFolder}/eng/common/mcp/azure-sdk-mcp.ps1", "-Run", "-Version", "1.0.0-dev.20250702.1"] + }, + } +} \ No newline at end of file diff --git a/azure-pipelines.yml b/azure-pipelines.yml index db0d1bc6729d..e70bfd92c2f6 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -2,7 +2,7 @@ name: "Azure OpenAPI" pool: name: azsdk-pool - demands: ImageOverride -equals ubuntu-20.04 + demands: ImageOverride -equals ubuntu-24.04 trigger: branches: diff --git a/documentation/Getting-started-with-TypeSpec-specifications.md b/documentation/Getting-started-with-TypeSpec-specifications.md index d6e1acea042a..b4ceaaad12cf 100644 --- a/documentation/Getting-started-with-TypeSpec-specifications.md +++ b/documentation/Getting-started-with-TypeSpec-specifications.md @@ -26,4 +26,4 @@ Within the azure-rest-api-specs repository, the TypeSpec specification serves as - [Setting up local environment for TypeSpec](./typespec-rest-api-dev-process.md#2-repo-setup--prerequisites) - Read up on [Folder Structure for TypeSpec](./typespec-structure-guidelines.md) - [Setting up a new TypeSpec project](./typespec-rest-api-dev-process.md#3-creating-a-new-typespec-project) -- [Setting up and regenerate SDK projects](./typespec-rest-api-dev-process.md#4-generate-or-refresh-sdk-code-from-a-typespec-project) +- [Setting up and regenerate SDK projects](./typespec-rest-api-dev-process.md#5-generate-sdk-code-from-a-typespec-project) diff --git a/documentation/ci-fix.md b/documentation/ci-fix.md index 
abbe59f6f094..37599cdb5769 100644 --- a/documentation/ci-fix.md +++ b/documentation/ci-fix.md @@ -15,8 +15,8 @@ If you need help with your specs PR, please first thoroughly read the [aka.ms/az - [Checks troubleshooting guides](#checks-troubleshooting-guides) - [`CredScan`](#credscan) - [`PoliCheck`](#policheck) - - [`SDK azure-powershell`](#sdk-azure-powershell) - - [`SDK azure-sdk-for-*` checks, like `SDK azure-sdk-for-go`](#sdk-azure-sdk-for--checks-like-sdk-azure-sdk-for-go) + - [`SDK Validation *` checks, like `SDK Validation - Go`](#sdk-validation--checks-like-sdk-validation---go) + - [`SDK Breaking Change Review`](#sdk-breaking-change-review) - [`Swagger APIView`](#swagger-apiview) - [If an expected APIView was not generated, follow the step below to troubleshoot.](#if-an-expected-apiview-was-not-generated-follow-the-step-below-to-troubleshoot) - [Diagnosing APIView failure for SDK Language (not Swagger or TypeSpec)](#diagnosing-apiview-failure-for-sdk-language-not-swagger-or-typespec) @@ -26,28 +26,17 @@ If you need help with your specs PR, please first thoroughly read the [aka.ms/az - [Run `oad` locally](#run-oad-locally) - [`Swagger LintDiff` and `Swagger Lint(RPaaS)`](#swagger-lintdiff-and-swagger-lintrpaas) - [`Swagger LintDiff` for TypeSpec: troubleshooting guides](#swagger-lintdiff-for-typespec-troubleshooting-guides) - - [`Record` causes `AvoidAdditionalProperties` and `PropertiesTypeObjectNoDefinition`](#recordunknown-causes-avoidadditionalproperties-and-propertiestypeobjectnodefinition) - - [`RequestBodyMustExistForPutPatch`](#requestbodymustexistforputpatch) - - [`PatchPropertiesCorrespondToPutProperties`](#patchpropertiescorrespondtoputproperties) - - [`@singleton` causes `EvenSegmentedPathForPutOperation` and `XmsPageableForListCalls`](#singleton-causes-evensegmentedpathforputoperation-and-xmspageableforlistcalls) - - [`AvoidAnonymousParameter`, `AvoidAnonymousTypes`, 
`IntegerTypeMustHaveFormat`](#avoidanonymousparameter-avoidanonymoustypes-integertypemusthaveformat) - - [`AvoidAnonymousTypes` inside a 202 response](#avoidanonymoustypes-inside-a-202-response) - - [`OAuth2Auth` causes `XmsEnumValidation`](#oauth2auth-causes-xmsenumvalidation) - - [`ProvisioningStateMustBeReadOnly`](#provisioningstatemustbereadonly) - - [`PatchBodyParameterSchema`](#patchbodyparameterschema) - [`Swagger ModelValidation`](#swagger-modelvalidation) - [`Swagger PrettierCheck`](#swagger-prettiercheck) - [Prettier reference](#prettier-reference) - [`Swagger SemanticValidation`](#swagger-semanticvalidation) - - [`Spell Check`](#spell-check) + - [Spell Check](#spell-check) - [`TypeSpec Validation`](#typespec-validation) - - [Run `tsv` locally](#run-tsv-locally) - [`license/cla`](#licensecla) - [Suppression Process](#suppression-process) - [Checks not covered by this guide](#checks-not-covered-by-this-guide) - [Obsolete checks](#obsolete-checks) - # Prerequisites Most guides here require for you to have `npm` installed, which you can get by installing [Node.js](https://nodejs.org/en/download). @@ -62,45 +51,53 @@ This check is owned by One Engineering System. See [1ES CredScan] for help. This check is owned by One Engineering System. See [1ES PoliCheck] for help. -## `SDK azure-powershell` - -> [!IMPORTANT] -> -> - This check is never blocking merging of a spec PR, even if it fails. -> - The `SDK azure-powershell` check is owned by the `Azure.Core` team, - not the Azure SDK team. - -The owner of this check is Yeming Liu from the `Azure.Core` team. -Please reach out to him with any questions. 
- -## `SDK azure-sdk-for-*` checks, like `SDK azure-sdk-for-go` +## `SDK Validation *` checks, like `SDK Validation - Go` > [!IMPORTANT] > -> - The `SDK azure-sdk-for-*` checks are owned by the Shanghai division of the Azure SDK team, +> - The `SDK Validation Status` check is a meta check that aggregates the results of all `SDK Validation - {Language}` + checks and reports a unified status. Re-run any individual `SDK Validation - {Language}` checks will automatically + trigger a re-run of this meta check. +> - The `SDK Validation *` checks are owned by the Shanghai division of the Azure SDK team, not the core Redmond Azure SDK team. -> - Only `SDK azure-sdk-for-go` check failure will block a specs PR, because this check serves as a canary for the - entire `SDK azure-sdk-for-*` group of checks. +> - For more information, refer to [SDK Validation FAQ](https://aka.ms/azsdk/sdk-automation-faq). If you have an issue or with any of checks listed in the first column of the table below: | Check name | Owner | GitHub login | |-----------------------------------|----------------| ------------------------------------------------------------- | -| `SDK azure-sdk-for-go` | Chenjie Shi | [tadelesh](https://github.com/tadelesh) | -| `SDK azure-sdk-for-java` | Weidong Xu | [weidongxu-microsoft](https://github.com/weidongxu-microsoft) | -| `SDK azure-sdk-for-js` | Qiaoqiao Zhang | [qiaozha](https://github.com/qiaozha) | -| `SDK azure-sdk-for-net` | Wei Hu | [live1206](https://github.com/live1206) | -| `SDK azure-sdk-for-python` | Yuchao Yan | [msyyc](https://github.com/msyyc) | +| `SDK Validation - Go` | Chenjie Shi | [tadelesh](https://github.com/tadelesh) | +| `SDK Validation - Java` | Weidong Xu | [weidongxu-microsoft](https://github.com/weidongxu-microsoft) | +| `SDK Validation - JS` | Qiaoqiao Zhang | [qiaozha](https://github.com/qiaozha) | +| `SDK Validation - .NET` 
| Wei Hu | [live1206](https://github.com/live1206) | +| `SDK Validation - Python` | Yuchao Yan | [msyyc](https://github.com/msyyc) | Do the following: 1. Attempt to diagnose the issue yourself: 1. Look at the affected PR's `checks` tab for the failing check. - 1. Click on the `View Azure DevOps build log for more details.` link from that tab and inspect the devOps logs. - For example, for `SDK azure-sdk-for-go` check look into the `SDK azure-sdk-fo-go` job, `SDK Automation` task logs. -1. If your investigation denotes this is likely a bug in the check itself and not your PR, reach out + 2. Click on the `View more details on Azure Pipelines.` link from that tab and inspect the devOps logs. + For example, for `SDK Validation - Go` check look into the `Azure Pipelines/SDK Validation - Go` pipeline run logs. +2. If your investigation denotes this is likely a bug in the check itself and not your PR, reach out to the owner of the check per the aforementioned table. +## `SDK Breaking Change Review` + +> [!IMPORTANT] +> +> - If your PR is flagged with any label that matches the pattern `BreakingChange-{Language}-Sdk`, the SDK breaking + changes will be reviewed by SDK reviewers around two business days after the completion of the first two review steps + in PR review workflow, i.e. REST API breaking change review and ARM review. +> - If you need to suppress the SDK breaking changes, refer to [SDK Suppressions](https://aka.ms/azsdk/sdk-suppression). 
+ +If the SDK breaking changes haven't been reviewed after two additional business days, you may reach out to the reviewers: + +| Language | Reviewer | GitHub login | +|-----------------|-----------------| ------------------------------------------------------------- | +| `Go` | Chenjie Shi | [tadelesh](https://github.com/tadelesh) | +| `JS` | Qiaoqiao Zhang | [qiaozha](https://github.com/qiaozha) | +| `Python` | Yuchao Yan | [msyyc](https://github.com/msyyc) | + ## `Swagger APIView` Various APIViews are generated as part of the Azure REST API specs PR build. Among these are TypeSpec and Swagger as well as any other language that is being generated in the run. When everything is successful you should see a comment box similar to the picture below showing the APIViews generated for TypeSpec or Swagger, plus all other languages being generated. diff --git a/documentation/samplefiles/readme.java.md b/documentation/samplefiles/readme.java.md index 1abdd25dc681..70c57e50aa07 100644 --- a/documentation/samplefiles/readme.java.md +++ b/documentation/samplefiles/readme.java.md @@ -3,6 +3,8 @@ These settings apply only when `--java` is specified on the command line. ``` yaml $(java) +service-name: [[ServiceName]] # human-readable service name, whitespace allowed client-flattened-annotation-target: disabled uuid-as-string: true +output-model-immutable: true ``` diff --git a/documentation/samplefiles/samplereadme.md b/documentation/samplefiles/samplereadme.md new file mode 100644 index 000000000000..750da28419b5 --- /dev/null +++ b/documentation/samplefiles/samplereadme.md @@ -0,0 +1,23 @@ +# [[ServiceName]] + +--- + +## Configuration + +### Basic Information + +These are the global settings for the [[ServiceName]]. + +```yaml +openapi-type: [[OpenApiType]] +tag: package-[[Version]] +``` + +### Tag: package-[[Version]] + +These settings apply only when `--tag=package-[[Version]]` is specified on the command line. 
+ +```yaml $(tag) == 'package-[[Version]]' +input-file: + - [[ResourceProviderName]]/[[ReleaseState]]/[[Version]]/[[ServiceName]].json +``` diff --git a/documentation/sdkautomation/GenerateInputSchema.json b/documentation/sdkautomation/GenerateInputSchema.json deleted file mode 100644 index 49b31364c7ba..000000000000 --- a/documentation/sdkautomation/GenerateInputSchema.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "type": "object", - "properties": { - "dryRun": { - // If dryRun is true, generateScript is expected to parse readme.md - // and output the package list with package name and related readme.md. - // Should not run codegen at this time. - // ** Not supported yet ** - "type": "boolean" - }, - "specFolder": { - // Path to local spec folder. - "type": "string" - }, - "headSha": { - // Git head sha. - "type": "string" - }, - "headRef": { - // Git head ref. - // Format will be "refs/pull//merge" or "refs/heads/". - "type": "string" - }, - "repoHttpsUrl": { - // Spec repo url in https without auth. - "type": "string" - }, - "trigger": { - "$ref": "TriggerType#" - }, - "changedFiles": { - // Changed file list in spec PR. - "type": "array", - "items": { - "type": "string" - } - }, - "relatedReadmeMdFiles": { - // Related readme.md files that pending generation. - "type": "array", - "items": { - "type": "string" - } - }, - "relatedTypeSpecProjectFolder": { - // Related typespec project folder that pending generation. 
- "type": "array", - "items": { - "type": "string" - } - }, - "installInstructionInput": { - // See #InstallInstructionScriptInput - "$ref": "InstallInstructionScriptInput#" - }, - "autorestConfig": { - // Autorest configuration in spec PR comment - "type": "string" - } - }, - "required": ["specFolder", "headSha", "headRef", "repoHttpsUrl", "trigger", "changedFiles", "relatedReadmeMdFiles"] -} diff --git a/documentation/sdkautomation/GenerateOutputSchema.json b/documentation/sdkautomation/GenerateOutputSchema.json deleted file mode 100644 index b14973a2bcb5..000000000000 --- a/documentation/sdkautomation/GenerateOutputSchema.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "type": "object", - "properties": { - "packages": { - "type": "array", - "items": { - "$ref": "#/definitions/PackageResult" - } - } - }, - "required": [ - "packages" - ], - "definitions": { - "PackageResult": { - "properties": { - "packageName": { - // Name of package. Will be used in branch name and PR title. - // By default it's folder name of first entry in path. - "type": "string" - }, - "result": { - // Status of package. By default it's succeeded. - "type": "string", - "enum": [ - "failed", - "succeeded", - "warning" - ], - "default": "succeeded" - }, - "path": { - // List of package content paths. - // If the path points to a folder then - // all the content under the folder will be included. - "type": "array", - "items": { - "type": "string" - } - }, - "readmeMd": { - // List of related readmeMd of this package. - // Must provide this field if dryRun is true. - "type": "array", - "items": { - "type": "string" - } - }, - "typespecProject": { - // List of related typespec project of this package. 
- "type": "array", - "items": { - "type": "string" - } - }, - "language": { - "type": "string" - }, - "apiViewArtifact": { - "type": "string" - }, - "changelog": { - "type": "object", - "properties": { - "content": { - // Content of changelog in markdown - "type": "string" - }, - "hasBreakingChange": { - // Does the new package has breaking change - "type": "boolean" - }, - "breakingChangeItems": { - // The breaking change details quote from the changelog - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "content" - ] - }, - "artifacts": { - "type": "array", - "items": { - "type": "string" - } - }, - "installInstructions": { - // See #InstallInstructionScriptOutput - "$ref": "InstallInstructionScriptOutput" - } - }, - "required": [ - "path" - ] - } - } -} diff --git a/documentation/sdkautomation/InitOutputSchema.json b/documentation/sdkautomation/InitOutputSchema.json deleted file mode 100644 index a2353494ccbd..000000000000 --- a/documentation/sdkautomation/InitOutputSchema.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": "object", - "properties": { - "envs": { - // Environment variable to be set in following scripts. - "additionalProperties": { - "type": "string" - } - } - } -} diff --git a/documentation/sdkautomation/InstallInstructionScriptInputSchema.json b/documentation/sdkautomation/InstallInstructionScriptInputSchema.json deleted file mode 100644 index d5befed6f1e6..000000000000 --- a/documentation/sdkautomation/InstallInstructionScriptInputSchema.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "$id": "InstallInstructionScriptInput", - "type": "object", - "properties": { - "packageName": { - // The package name. May be skipped if sdk automation don't know the info yet. - "type": "string" - }, - "artifacts": { - // List of artifact's path. May be skipped if sdk automation don't know the info yet. - "type": "array", - "items": { - "type": "string" - } - }, - "isPublic": { - // Is the download url public accessible. 
- // If it's false, the download command template will be - // az rest --resource -u "{URL}" --output-file {FILENAME} - "type": "boolean" - }, - "downloadCommandTemplate": { - // Download command template. Replace {URL} and {FILENAME} to get the real command. - "type": "string" - }, - "trigger": { - "$ref": "TriggerType#" - } - } -} diff --git a/documentation/sdkautomation/InstallInstructionScriptOutputSchema.json b/documentation/sdkautomation/InstallInstructionScriptOutputSchema.json deleted file mode 100644 index 5563a6f72c39..000000000000 --- a/documentation/sdkautomation/InstallInstructionScriptOutputSchema.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "$id": "InstallInstructionScriptOutput", - "type": "object", - "properties": { - "full": { - // Full version of install instruction will be shown in generated SDK PR. - // Should be in markdown format. - "type": "string" - }, - "lite": { - // Lite version of install instruction will be shown in generated SDK PR. - // Should be in markdown format. - "type": "string" - } - }, - "required": [ - "full" - ] -} diff --git a/documentation/sdkautomation/README.md b/documentation/sdkautomation/README.md index f87cbaa44556..f4778c6e4665 100644 --- a/documentation/sdkautomation/README.md +++ b/documentation/sdkautomation/README.md @@ -1,22 +1,22 @@ # SDK Automation Customization -This is the specification of the new SDK Automation customization configuration. -Old customization that hardcoded in sdk automation will still work but this new -approach is preferred. +This is the specification of the SDK Automation customization configuration. ## SDK Automation workflow -### Opened PR Validation Trigger +### Spec Pull Request Trigger -SDK Automation is launched with matrix in azure pipeline. For each language configured: +SDK Automation is launched in azure pipeline. For each language configured: -1. Get the PR merge commit and clone the spec repo on the merge commit. +1. 
Clone the spec repo on the merge commit and clone the specified SDK language repo from the main branch. -2. Get the PR changed file list. For each changed file, find the nearest readme.md in parent folder. Get list of related readme.md. +2. Identify the TypeSpec project or the `readme.md` file based on the PR changed files. -3. Filter which sdk will be generated: +3. Trigger the `spec-gen-sdk` pipelines for five SDK languages. - 1. For Swagger PR, filter the list of readme.md with: find the `swagger-to-sdk` section in the readme.md, and see if the specified language is configured for that readme.md. Example of `swagger-to-sdk` in SDK Automation: +4. Validate SDK configuration in the `spec-gen-sdk` pipeline: + + 1. For `readme.md` file, validate the specified language is configured for in the `swagger-to-sdk` section. Example of `swagger-to-sdk` in SDK Automation: ``` ```yaml $(swagger-to-sdk) swagger-to-sdk: @@ -26,38 +26,29 @@ SDK Automation is launched with matrix in azure pipeline. For each language conf - repo: azure-sdk-for-js ``` ``` - 2. For TypeSpec PR, filter the list of tspconfig.yaml: find the `options` config in tspconfig.yaml, and see if the specified language emitter is configured. + 2. For TypeSpec project, validate if the specified language emitter is configured in the `options` config in the `tspconfig.yaml` file. - If the specific language is not configured here, generation for this typespec project will be skipped. - -4. Get `specificationRepositoryConfiguration.json` from spec repo default branch. See [SpecRepoConfig](#specrepoconfig). Get the repo and branch config in the file. + If SDK configuration is not configured here, the SDK generation will be skipped. -5. Clone __mainRepository__ and checkout __mainBranch__. If __secondaryRepository__ is specified then checkout __secondaryRepository__ and __secondaryBranch__ instead. +5. Load `specificationRepositoryConfiguration.json` from the merged PR commit. See [SpecRepoConfig](#specrepoconfig). 
Get the SDK repo config path. 6. Get `swagger_to_sdk_config.json` from cloned SDK repository. The config file path could be customized by __configFilePath__ in spec config. For the definition of the config see [SwaggerToSdkConfig](#swaggertosdkconfig). 7. Launch __initScript__ defined in [SwaggerToSdkConfig](#swaggertosdkconfig). All the script's working directory is root folder of cloned SDK repository. -8. Calculate __PR diff__ and related `readme.md`. If __generationCallMode__ is __one-for-all-configs__ then run ___one pass for the rest steps___, else (__one-per-config__) ___loop the rest steps___ with each `readme.md`. - -9. Launch __generateScript__ defined in [SwaggerToSdkConfig](#swaggertosdkconfig) with [generateInput.json](#generateinput). The script should produce [generateOutput.json](#generateoutput) if __parseGenerateOutput__ is true. If dryRun is set to true then first run of __generateScript__ will be used to collect package information , then loop each package info and checkout package related branch and launch __generateScript__ with package related readmeMd and dryRun set to false. - -10. Get generated package. If __packageFolderFromFileSearch__ is defined with file search then package folder is detected based on git diff in SDK repository and algorithm described in [SwaggerToSdkConfig Schema](#swaggertosdkconfig-schema). Else package folder is from [generateOutput.json](#generateoutput). For each package ___loop the rest steps___. - -11. Launch __buildScript__ to build the package. Collect the artifacts by config __artifactPathFromFileSearch__. This step could be skipped if it's not defined in [SwaggerToSdkConfig](#swaggertosdkconfig) and it's covered by __generateScript__ and the result could be found in [generateOutput.json](#generateoutput). - -12. Launch __changelogScript__ to get changelog and detect breaking change. This step could be skipped if changelog and breaking change could be found in [generateOutput.json](#generateoutput). 
If breaking change is found, the spec PR will be labelled with `BreakingChange--Sdk`. +8. Launch __generateScript__ defined in [SwaggerToSdkConfig](#swaggertosdkconfig) with [generateInput.json](#generateinput). The script should produce [generateOutput.json](#generateoutput) if __parseGenerateOutput__ is true. -13. Launch __installInstructionScript__ to get install instruction for that package. This step could be skipped if install instruction could be found in [generateOutput.json](#generateoutput). The lite install instruction will be shown in spec PR comment, the full install instruction will be shown in generated SDK PR. +9. Get generated package. If __packageFolderFromFileSearch__ is defined with file search then package folder is detected based on git diff in SDK repository and algorithm described in [SwaggerToSdkConfig Schema](#swaggertosdkconfig-schema). Else package folder is from [generateOutput.json](#generateoutput). For each package ___loop the rest steps___. -14. Commit the package related code in SDK repository. Force push to [GenerationBranch](#generationbranch) in __integrationRepository__. Create or update [GenerationPR](#generationpr) from [GenerationBranch](#generationbranch) to [MainBranch](#mainbranch) in __integrationRepository__. If __integrationRepository__ is a fork of __mainRepository__, its [MainBranch](#mainbranch) should be synced once a day. +10. Launch __buildScript__ to build the package. Collect the artifacts by config __artifactPathFromFileSearch__. This step could be skipped if it's not defined in [SwaggerToSdkConfig](#swaggertosdkconfig) and it's covered by __generateScript__ and the result could be found in [generateOutput.json](#generateoutput). -### Continuous Integration (PR Merged) Trigger +11. Launch __changelogScript__ to get changelog and detect breaking change. This step could be skipped if changelog and breaking change could be found in [generateOutput.json](#generateoutput). 
If breaking change is found, the spec PR will be labelled with `BreakingChange--Sdk`. -Almost the same as opened PR trigger, with different on step 14: +12. Launch __installInstructionScript__ to get install instruction for that package. This step could be skipped if install instruction could be found in [generateOutput.json](#generateoutput). The lite install instruction will be shown in spec PR comment, the full install instruction will be shown in generated SDK PR. -14. Commit the package related code in SDK repository. Close [GenerationPR](#generationpr) and delete [GenerationBranch](#generationbranch). Force push to [IntegrationBranch](#integrationbranch) in __integrationRepository__. Create or update [IntegrationPR](#integrationpr) from [IntegrationBranch](#integrationbranch) to [MainBranch](#mainbranch) in __mainRepository__. Close the [integrationPR](#integrationPR) if __closeIntegrationPR__ in [SwaggerToSdkConfig](#swaggertosdkconfig) is set to true. +### Manual Trigger +Almost the same as spec pull request trigger. The difference is users can specify the commitish for both the spec repo and the SDK repo while triggering the pipeline. In addition, an SDK pull request will be created after the pipeline completes successfully. Refer to https://aka.ms/azsdk/spec-gen-sdk-pipeline-doc for more details.
## Definitions @@ -68,27 +59,15 @@ This is type of file `./specificationRepositoryConfiguration.json` in swagger sp ```json { "sdkRepositoryMappings": { - "azure-sdk-for-js": { - "integrationRepository": "AzureSDKAutomation/azure-sdk-for-js", - "mainRepository": "Azure/azure-sdk-for-js" - }, - "azure-sdk-for-python": { - "integrationRepository": "AzureSDKAutomation/azure-sdk-for-python", - "mainRepository": "Azure/azure-sdk-for-python", - "mainBranch": "release/v3" + "azure-sdk-for-go": { + "configFilePath": "eng/swagger_to_sdk_config.json" } }, "overrides": { "Azure/azure-rest-api-specs-pr": { "sdkRepositoryMappings": { - "azure-sdk-for-js": { - "integrationRepository": "azure-sdk/azure-sdk-for-js-pr", - "mainRepository": "Azure/azure-sdk-for-js-pr" - }, - "azure-sdk-for-python": { - "integrationRepository": "azure-sdk/azure-sdk-for-python-pr", - "mainRepository": "Azure/azure-sdk-for-python-pr", - "mainBranch": "release/v3" + "azure-sdk-for-net": { + "configFilePath": "eng/swagger_to_sdk_config.json" } } } @@ -98,11 +77,11 @@ This is type of file `./specificationRepositoryConfiguration.json` in swagger sp #### SpecRepoConfig Schema -See [./SpecConfigSchema.json](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/sdkautomation/SpecConfigSchema.json) +See [SpecConfigSchema.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/SpecConfigSchema.json) ### SwaggerToSdkConfig This is type of file `./swagger_to_sdk_config.json` in sdk repo. -The running environment of these scripts would be expected to be __Ubuntu 18.04__ on Azure Pipeline. This may change in the future. All the running script should be executable. +The running environment of these scripts would be expected to be __Ubuntu 22__ on Azure Pipeline. This may change in the future. All the running script should be executable. The working folder of all the scripts is the __root folder of sdk repo__. 
#### SwaggerToSdkConfig Example @@ -174,7 +153,7 @@ The working folder of all the scripts is the __root folder of sdk repo__. #### SwaggerToSdkConfig Schema -See [./SwaggerToSdkConfigSchema.json](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/sdkautomation/SwaggerToSdkConfigSchema.json) +See [SwaggerToSdkConfigSchema.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/SwaggerToSdkConfigSchema.json) ### GenerateInput @@ -184,29 +163,29 @@ Input file for generate script. ```jsonc { - "dryRun": false, "specFolder": "/z/work/azure-rest-api-specs", "headSha": "fce3400431eff281bddd04bed9727e63765b8da0", - "headRef": "refs/pull/1234/merge", "repoHttpsUrl": "https://github.com/Azure/azure-rest-api-specs.git", - "trigger": "pull_request", - "changedFiles": [ - "specification/cdn/something/cdn.json" - ], + "runMode": "spec-pull-request", + "changedFiles": [], + "apiVersion": "", + "sdkReleaseType": "beta", "relatedReadmeMdFiles": [ "specification/cdn/something/readme.md" ], + "relatedTypeSpecProjectFolder": [ + "specification/contosowidgetmanager/Contoso.Management" + ], "installInstructionInput": { "isPublic": false, - "downloadCommandTemplate": "curl -L \"{URL}\" -o {FILENAME}", - "trigger": "pullRequest" + "downloadUrlPrefix": "prefix" } } ``` #### GenerateInput Schema -See [./GenerateInputSchema.json](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/sdkautomation/GenerateInputSchema.json) +See [GenerateInputSchema.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/GenerateInputSchema.json) ### GenerateOutput @@ -245,7 +224,7 @@ Output file for generate script.
#### GenerateOutput Schema -See [./GenerateOutputSchema.json](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/sdkautomation/GenerateOutputSchema.json) +See [GenerateOutputSchema.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/GenerateOutputSchema.json) ### InstallInstructionScriptInput @@ -261,14 +240,13 @@ Input of install instruction script. "sdk/cdn/cdn.snuget" ], "isPublic": true, - "downloadCommandTemplate": "curl -L \"{URL}\" -o {FILENAME}", - "trigger": "pullRequest", + "downloadUrlPrefix": "prefix" } ``` #### InstallInstructionScriptInput Schema -See [./InstallInstructionScriptInput.json](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/sdkautomation/InstallInstructionScriptInput.json) +See [InstallInstructionScriptInput.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/InstallInstructionScriptInputSchema.json) ### InstallInstructionScriptOutput @@ -284,35 +262,10 @@ Output of install instruction script. #### InstallInstructionScriptOutput Schema -See [./InstallInstructionScriptOutput.json](https://github.com/Azure/azure-rest-api-specs/blob/master/documentation/sdkautomation/InstallInstructionScriptOutput.json) - -### TriggerType - -#### TriggerType Schema - -```jsonc -{ - // How this generation is triggered. - "$id": "TriggerType", - "type": "string", - "enum": ["pullRequest", "continuousIntegration"] -} -``` +See [InstallInstructionScriptOutput.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/InstallInstructionScriptOutputSchema.json) ### InitOutput #### InitOutput Schema -```jsonc -{ - "type": "object", - "properties": { - "envs": { - // Environment variable to be set in following scripts. 
- "additionalProperties": { - "type": "string" - } - } - } -} -``` +See [InitOutputSchema.json](https://github.com/Azure/azure-sdk-tools/blob/main/tools/spec-gen-sdk/src/types/InitOutputSchema.json) diff --git a/documentation/sdkautomation/SpecConfigSchema.json b/documentation/sdkautomation/SpecConfigSchema.json deleted file mode 100644 index f293bc13309c..000000000000 --- a/documentation/sdkautomation/SpecConfigSchema.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "type": "object", - "properties": { - "sdkRepositoryMappings": { - // A mapping of SDK repository names to the names of the SDK repositories - // that all interaction should go to instead. - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "$ref": "#/definitions/SdkRepositoryConfig" - }, - { - "type": "string" - } - ] - }, - "propertyNames": { - // The property name is the sdk name identifier. - "type": "string" - } - }, - "overrides": { - // Override config for specific repository. - "type": "object", - "additionalProperties": { - "$ref": "#/" - }, - "propertyNames": { - // The property name is the sdk repo ref. - "$ref": "#/definitions/RepositoryName" - } - } - }, - "required": [ - "sdkRepositoryMappings" - ], - "definitions": { - "RepositoryName": { - // Reference to a repository on github. Could be or /. - // By default the is the same as the owner of the spec repo. - "type": "string" - }, - "SdkRepositoryConfig": { - "type": "object", - "properties": { - "mainRepository": { - // The repository that the final release PR will targeting. - "$ref": "#/definitions/RepositoryName" - }, - "mainBranch": { - // Base branch of codegen branches - "default": "master", - "type": "string" - }, - "integrationRepository": { - // The repository that hold generation branch, generation PR and integration branch. - // By default it's the same as mainRepository - "$ref": "#/definitions/RepositoryName" - }, - "secondaryRepository": { - // Codegen runs on this repository. 
- // By default it's the same as 'mainRepository' but it could be different. - "$ref": "#/definitions/RepositoryName" - }, - "secondaryBranch": { - // Codegen runs on this branch on secondaryRepository. - // By default it's the same as 'mainBranch' but it could be different. - "type": "string" - }, - "integrationBranchPrefix": { - // The prefix that will be applied to the beginning of integration branches - "type": "string", - "default": "sdkAutomation" - }, - "configFilePath": { - // Path to swagger-to-sdk config in sdk repo - "default": "swagger_to_sdk_config.json" - } - }, - "required": [ - "mainRepository" - ] - } - } -} diff --git a/documentation/sdkautomation/SwaggerToSdkConfigSchema.json b/documentation/sdkautomation/SwaggerToSdkConfigSchema.json deleted file mode 100644 index 31a1a7c1b3f5..000000000000 --- a/documentation/sdkautomation/SwaggerToSdkConfigSchema.json +++ /dev/null @@ -1,299 +0,0 @@ -{ - "type": "object", - "properties": { - "advancedOptions": { - // To keep backward compatibility, but will not list schema for old config options. - "properties": { - "createSdkPullRequests": { - // Should SDK Automation create PR or not. - "type": "boolean", - "default": true - }, - "closeIntegrationPR": { - // Should SDK Automation close integrationPR to reduce noise. - "type": "boolean", - "default": true - }, - "draftIntegrationPR": { - // Should SDK Automation create draft integrationPR to reduce noise. - "type": "boolean", - "default": true - }, - "generationCallMode": { - // If we have multiple related readme.md, should we call generation once with - // all the readme.md or should we call generation multiple times and one per readme.md. - "type": "string", - "enum": [ - "one-per-config", - "one-for-all-configs" - ], - "default": "one-for-all-configs" - }, - "cloneDir": { - // SDK clone directory. 
By default it's name of sdk repo - "type": "string" - } - }, - "default": { - "createSdkPullRequests": true, - "closeIntegrationPR": true, - "draftIntegrationPR": true - } - }, - "initOptions": { - // Init the environment. Install dependencies. - "type": "object", - "properties": { - "initScript": { - // Script to init dependencies. - // Param: - // initInput.json: Not implemented. Placeholder for input arguments. - // initOutput.json: See #initOutput. - "$ref": "#/definitions/RunOptions" - } - }, - "default": {} - }, - "generateOptions": { - // Generate the SDK code. - "type": "object", - "properties": { - "generateScript": { - // Script to generate the SDK code. - // Param: - // generateInput.json: See #GenerateInput - // generateOutput.json: See #GenerateOutput - "$ref": "#/definitions/RunOptions" - }, - "preprocessDryRunGetPackageName": { - // If this options is set to true, generateScript will first run with - // "dryRun": true to get package name and related readme.md, - // then for each package, checkout the expected branch and launch generateScript. - "type": "boolean", - "default": false - }, - "parseGenerateOutput": { - // Will this script output to generateOutput.json. - // If not, default behavior will be applied that outcome will be - // detected automatically based on filename regex search. - "type": "boolean", - "default": true - } - }, - "default": { - "preprocessDryRunGetPackageName": false, - "parseGenerateOutput": false - } - }, - "packageOptions": { - // Get package folder and build / get changelog - "type": "object", - "properties": { - "packageFolderFromFileSearch": { - "oneOf": [ - { - // If this option is set to object, then package folder will be detected automatically. - // based on filename regex search. - // This options must be set to object if parseGenerateOutput is false. 
- "type": "object", - "properties": { - "searchRegex": { - // Search algorithm: - // For each changed file detected after generation - // PotentialPackageFolder = folder of changed file - // While PotentialPackageFolder is not root folder of sdk repo: - // If PotentialPackageFolder contains a file that matches the searchRegex: - // PackageFolder found, break - // Else: - // PotentialPackageFolder = parent folder of PotentialPackageFolder - "type": "string", - "format": "regex" - }, - "packageNamePrefix": { - // Prefix to be appended to packageName. - // By default packageName will be the folder name of packageFolder - "type": "string" - } - }, - "required": [ - "searchRegex" - ] - }, - { - // If this option is set to false, then package folder will be from generateOutput.json. - "const": false - } - ] - }, - "buildScript": { - // Build the generated sdk. - // Param: - // Package folder could be a list separated by space if it's from generateOutput.json. - "$ref": "#/definitions/RunOptions" - }, - "changelogScript": { - // Changelog generation and breaking-change detection. - // Param: - // Package folder could be a list separated by space if it's from generateOutput.json. - // Expected output from stdout/stderr: Changelog in markdown - "allOf": [ - { - "$ref": "#/definitions/RunOptions" - } - ], - "properties": { - "breakingChangeDetect": { - // If stdout or stderr matches this in output of changelog tool - // then we assume this SDK has breaking change. - "$ref": "#/definitions/RunLogFilterOptions" - } - } - }, - "breakingChangesLabel": { - // Label to be added in spec PR if breaking change is found - "type": "string" - } - }, - "default": {} - }, - "artifactOptions": { - "properties": { - "artifactPathFromFileSearch": { - "oneOf": [ - { - // If this option is set to object, then artifacts will be detected automatically - // based on filename regex search. - // This options must be set to object if parseGenerateOutput is false. 
- "type": "object", - "properties": { - "searchRegex": { - // Any file under package folder matching the searchRegex is package artifact. - "type": "string", - "format": "regex" - } - }, - "required": [ - "searchRegex" - ] - }, - { - // If this option is set to false, then package folder will be from generateOutput.json - "const": false - } - ] - }, - "installInstructionScript": { - // Generate install instruction that could be shown in spec PR comment (lite version) - // or in generated SDK PR (full version). - // If generateOutput.json contains installInstruction then this could be skipped. - // Param: - // installInstructionInput.json: See #InstallInstructionScriptInput . - // installInstructionOutput.json: See #InstallInstructionScriptInput . - "$ref": "#/definitions/RunOptions" - } - }, - "default": {} - } - }, - "definitions": { - "RunOptions": { - // Options to run a script and collect log. - "type": "object", - "properties": { - "path": { - // Script path related to repo root - "type": "string" - }, - "envs": { - // Extra environment variable to be passed to the script. - // By default the following envs will be passed: - // USER, HOME, PATH, SHELL, PWD (current directory), TMPDIR (dedicated temp folder) - "type": "array", - "items": { - "type": "string" - }, - "default": [] - }, - "logPrefix": { - // Prefix to be added to SDK Automation log. By default it would be filename of the script. - "type": "string" - }, - "stdout": { - // How should SDK Automation handle the script stdout stream - "allOf": [ - { - "$ref": "#/definitions/RunLogOptions" - } - ] - }, - "stderr": { - // How should SDK Automation handle the script stderr stream - "allOf": [ - { - "$ref": "#/definitions/RunLogOptions" - } - ], - "default": { - "scriptWarning": true - } - }, - "exitCode": { - "properties": { - // How should SDK Automation handle non-zero exitCode. - "showInComment": { - // Should we show this error in comment. 
- "type": "boolean", - "default": true - }, - "result": { - // If script has non-error exitCode how should we mark the script's result. - "type": "string", - "enum": [ - "error", - "warning", - "ignore" - ], - "default": "error" - } - }, - "default": { - "showInComment": true, - "result": "error" - } - } - }, - "required": [ - "path" - ] - }, - "RunLogOptions": { - // How should SDK Automation handle the log stream. - "showInComment": { - // Should we show this stream in comment. - "$ref": "#/definitions/RunLogFilterOptions" - }, - "scriptError": { - // If any line match, assume the script fails. - "$ref": "#/definitions/RunLogFilterOptions" - }, - "scriptWarning": { - // If any line match, assume the script warns. - "$ref": "#/definitions/RunLogFilterOptions" - } - }, - "RunLogFilterOptions": { - "oneOf": [ - { - // If line of log match this regex then hit - "type": "string", - "format": "regex" - }, - { - // If set to true, any line of log will hit - "type": "boolean" - } - ], - "default": false - } - } -} diff --git a/documentation/sdkautomation/sdk-automation-faq.md b/documentation/sdkautomation/sdk-automation-faq.md deleted file mode 100644 index f3aac8a6e791..000000000000 --- a/documentation/sdkautomation/sdk-automation-faq.md +++ /dev/null @@ -1,36 +0,0 @@ -| Short Link: | [aka.ms/azsdk/sdk-automation-faq](https://aka.ms/azsdk/sdk-automation-faq) | -|--|--| - -# FAQ - -## How to Download the Generated Artifacts - -### Prerequisites -Ensure the SDK automation CI check succeeds. If the check fails, the artifacts might not have been generated. - -### Steps to Download Artifacts -Let's take Python as an example. - -1. Navigate to `Checks` page: click on the `Checks` tab in the navigation pane of the pull request. - ![image](https://github.com/user-attachments/assets/109f7d90-52f6-45ed-ac12-ce2ae3e49af8) - -2. View the `python` SDK Automation Run result: click on the `SDK azure-sdk-for-python` item in the left CI check list. 
- ![image](https://github.com/user-attachments/assets/d38a0b96-d584-46c1-96ed-eec747652962) - -3. View Azure DevOps Build Pipeline Run: click on the `View Azure DevOps build log for more details` link. - - ![image](https://github.com/Azure/azure-rest-api-specs/assets/20296335/64ec1f22-37df-4597-8259-3dd581656faa) - -4. Browse to Pipeline Run Result: click on the `left` arrow. - - ![image](https://github.com/Azure/azure-rest-api-specs/assets/20296335/726c2e8a-9a39-4af2-b745-0136d53bee6d) - -5. Access Artifacts Page: click on the `artifacts` link. - - ![image](https://github.com/Azure/azure-rest-api-specs/assets/20296335/b2c4c307-a430-4dec-bb09-5ac7e659a418) - -6. Locate Generated Artifacts: - The generated artifacts for Python are located under the `SDK_Artifact_Python` folder. - - ![image](https://github.com/Azure/azure-rest-api-specs/assets/20296335/4cecb794-0ec9-4092-a0cc-b45214438e1e) - diff --git a/documentation/sdkautomation/sdk-suppressions.md b/documentation/sdkautomation/sdk-suppressions.md deleted file mode 100644 index 1a761ed65f7a..000000000000 --- a/documentation/sdkautomation/sdk-suppressions.md +++ /dev/null @@ -1,44 +0,0 @@ -| Short Link: | [aka.ms/azsdk/sdk-suppression](https://aka.ms/azsdk/sdk-suppression) | -|--|--| - -# SDK Breaking Change Review Workflow - -When a specification pull request has breaking change for a specific SDK language, such as `Go`, the pull request will be labelled with "BreakingChange-Go-Sdk". According to [the design principles of Azure SDK](https://azure.github.io/azure-sdk/general_introduction.html#dependable), breaking changes are more harmful to a user’s experience than most new features and improvements are beneficial. Therefore, the pull request author will be strongly encouraged to update the API specification to remove the breaking changes for the SDK. 
If the breaking change cannot be mitigated by a specification change, the author will need to suppress the breaking changes and have the suppression reviewed. See the following section for details on how to suppress breaking changes. - -Once the suppression is added to the pull request, SDK reviewers will evaluate the suppressions and either provide feedback or approve the suppressions. When the suppressions have been approved, the SDK breaking change review is complete and the pull request can proceed to the next stage. - -> Note: both the suppressions review and the SDK breaking change review will be conducted proactively by SDK reviewers, and the expected completion time is around 48 business hours. - -## How to Suppress the SDK Breaking Changes - -To suppress SDK breaking changes, go to the SDK generation CI check result for a specific SDK, such as "azure-sdk-for-go", and look for the `Absent SDK breaking changes suppressions` section, as shown in the following code block: -``` -Absent SDK breaking changes suppressions -- Function `*LinkerClient.BeginCreateOrUpdate` has been removed -- Function `*LinkerClient.NewListPager` has been removed -``` -### Next Steps - -1. The pull request authors create a file called `sdk-suppressions.yaml` if it doesn't already exist in your service folder in the `azure-rest-api-specs` repository. - - For TypeSpec scenarios, `sdk-suppressions.yaml` should be placed in the root folder of the TypeSpec project, which also contains the `tspconfig.yaml` file. For example, refers to the location for [Contoso.Management](https://github.com/Azure/azure-rest-api-specs/tree/main/specification/contosowidgetmanager/Contoso.Management). - - For OpenAPI scenarios, `sdk-suppressions.yaml` should be placed in the `{service}\resource-manager` folder. 
For example, see the location for the [contosowidgetmanager resource provider](https://github.com/Azure/azure-rest-api-specs/tree/main/specification/contosowidgetmanager/resource-manager). -2. The pull request authors copy every line under the `Absent SDK breaking changes suppressions` section to the suppression file in the following format: -``` -suppressions: - azure-sdk-for-go: - - package: 'sdk/resourcemanager/servicelinker/armservicelinker' - breaking-changes: - - Function `*LinkerClient.BeginCreateOrUpdate` has been removed - - Function `*LinkerClient.NewListPager` has been removed - -``` -3. The pull request authors add this suppression file change to your spec pull request. Then the suppressions will be recognized by the bot, and the corresponding suppression label will be added to the spec pull request. For example, for the `azure-sdk-for-go`, the label `BreakingChange-Go-Sdk-Suppression` will be added to the pull request. -4. The SDK reviewers will evaluate the suppressions and either approve them or provide feedback. If the suppressions are approved, the corresponding approval label will be added to the pull request. For example, for the `azure-sdk-for-go`, the label `BreakingChange-Go-Sdk-Suppression-Approved` will be added to the pull request. - -## How Does the Suppression Work - -The suppression file will remain with the specification move with it to other branches or repositories(such as from the private specification repository to the public specification repository). The suppressions can be deleted after the SDK has been released to the public. - -## How to Get the Suppression Reviewed - -SDK reviewers will monitor the specification pull requests with breaking change suppression lables. For example, for the `azure-sdk-for-go`, the label is `BreakingChange-Go-Sdk-Suppression`. They will evaluate the suppressions and either approve them or provide feedback. 
If the suppressions are approved, the corresponding approval label will be added to the pull request. For example, for the `azure-sdk-for-go`, the label `BreakingChange-Go-Sdk-Suppression-Approved` will be added to the pull request. Once the suppression review is complete, the pull request can proceed to the next stage. diff --git a/documentation/typespec-end-to-end-scenarios.md b/documentation/typespec-end-to-end-scenarios.md deleted file mode 100644 index 3084153e2be3..000000000000 --- a/documentation/typespec-end-to-end-scenarios.md +++ /dev/null @@ -1,287 +0,0 @@ -# TypeSpec end to end scenarios - -## Scenario definitions - -It is crucial having simple and smooth processes that allow developer to easily: - -- [TypeSpec end to end scenarios](#typespec-end-to-end-scenarios) - - [Scenario definitions](#scenario-definitions) - - [Four main user scenarios to support](#four-main-user-scenarios-to-support) - - [1. TypeSpec project scaffolding](#1-typespec-project-scaffolding) - - [Flowchart](#flowchart) - - [Remaining Tasks](#remaining-tasks) - - [Details \& Open questions](#details--open-questions) - - [2. SDK code generation](#2-sdk-code-generation) - - [Flowchart](#flowchart-1) - - [Remaining Tasks](#remaining-tasks-1) - - [3. Inner Dev loop SDK generation local scenario](#3-inner-dev-loop-sdk-generation-local-scenario) - - [Flowchart](#flowchart-2) - - [Remaining Tasks](#remaining-tasks-2) - - [Details \& Open questions](#details--open-questions-1) - - [4.Dev Outer loop](#4dev-outer-loop) - - [Use case](#use-case) - - [4.1 Outer Dev loop azure-rest-api-specs pipeline](#41-outer-dev-loop-azure-rest-api-specs-pipeline) - - [Flowchart](#flowchart-3) - - [Details \& Open questions](#details--open-questions-2) - - [4.2 Outer Dev loop SDK repo pipeline](#42-outer-dev-loop-sdk-repo-pipeline) - - [Flowchart](#flowchart-4) - - [Details \& Open questions](#details--open-questions-3) - - [4. 
Outer loop Overall Status and tracking](#4-outer-loop-overall-status-and-tracking) - - [Remaining Tasks](#remaining-tasks-3) - -Aside from the developer process, we have a few goals on managing repo package version -- Should adopt a centralized package version control to avoid chaos -- Centralized package version control should give flexibility, freedom of relative independent emitter versions - -## Four main user scenarios to support -#### 1. TypeSpec project scaffolding - -##### Flowchart -```mermaid -flowchart TD -classDef highlight fill:#ffd700 -classDef grey fill:#CCCCCC,color:#555555; -User((::)) --> |Develop in rest-api repo|A[Clone rest-api repo] -User((::)) --> |Develop in custom service repo|B1[1.2 `tsp init https://aka.ms/azure-init`] -B1-->D -A --> B[1.1 Create folder structure per spec ] -B --> C["1.2 `tsp init https://aka.ms/azure-init`
delete package.json in project folder"] -C --> D[...iterate on *.tsp specs] -D ---> |Loop|F -D --> E["1.3 generate swagger examples -(need a new script)"] -E --> F["tsp compile . (generate swaggers)"] -F --> G1[Copy specs files into rest-api repo] -G1 --> G2[Optionally: Adopt shift-left pipeline] -G2--> G -F --> G[create a spec PR] -class A,G,D,F grey -``` -##### Remaining Tasks -| Step | Step Detail | Assignee | Implemented | Verified | -|--|--|--|--:|--:| -| 1.1 | [Folder layout doc](./typespec-structure-guidelines.md) | Ray Chen | [X] | [X] | -| 1.2 | `tsp init`.
additional info may be collected for SDK | Allen Zhang | [X] | [ ] | -| 1.3 | Example generation instructions | Ray Chen | [ ] | [ ] | - -##### Details & Open questions - -#### 2. SDK code generation - -##### Flowchart -```mermaid -flowchart TD -classDef highlight fill:#ffd700 -classDef grey fill:#CCCCCC,color:#555555; -User((::))-->A -A["clone Rest-API and SDK repo locally"] --> B -B["2.1 pre-requsite dependencies installation -(each language would have a installation script)"] --> G -subgraph G["run `2.2 TypeSpec-Project-Prcoess.ps1`"] - F - C - D -end -F["2.2.1 create/update tsp-location.yaml"] -C["2.2.2 call TypeSpec-Project-Sync.ps1"] -D["2.2.3 call TypeSpec-Project-Generate.ps1"] -G-->E["code build"] -class A,E grey -class C,D,F highlight -``` -- 2.1 Optional: Scripts should exist under `\eng\scripts\` folder on all repos. - -- 2.2 `TypeSpec-Project-Process.ps1` - - What does this script do? - - fetch `tspconfig.yaml` from remote if it doesn’t exist locally - - parse `tspconfig.yaml` - - create an sdk project folder if none exists - - create/update `tsp-location.yaml` - - call `TypeSpec-Project-Sync.ps1` - - call `TypeSpec-Project-Generate.ps1` - - input: - - typespecProjectDirectory (required) - either a folder of `tspconfig.yaml` or a remoteUrl of `tspconfig.yaml` - - commitSha (optional) - - repoUrl (optional) - - output: n/a - -- 2.2.2 `TypeSpec-Project-Sync.ps1` - - What does this script do? - - fetch specs from remote spec repo or use a local spec repo - - copy specs to temp location under the sdk project folder - - support a local spec repo if one exists (TODO) - - input: - - projectDirectory (required) - - localSpecRepoPath (optional) - - output: n/a - -- 2.2.3 `TypeSpec-Project-Generate.ps1` - - What does this script do? 
- - create scaffolding for new project (use the folder created by 2.2) - - generate sdk code - - input: - - projectDirectory (required) - - typespecAdditionalOptions (emitter options) (optional) - -- 2.2.3.1 Function `Generate-{Language}-New-Project-Scaffolding` - - What does this function do? - - Create the folders and the files under SDK project folder, such as creating `test` folder, `sln` files that would vary between language repos. Eventually, this project scaffolding would be integrated into language emitter. - - Create or update the files outside of the SDK project folder (CI.yml or pom.xml for java, etc.) - - This function is called by generate script(2.4) - - input: projectDirectory (required) - -##### Remaining Tasks -| Step | Step Detail | Assignee | Implemented | Verified | -|--|--|--|--:|--:| -| 2.1 | Dependencies scripts | SDK owner | [ ] | [] | -| 2.2 | TypeSpec-Project-Process.ps1 | EngSys | [ ] | | [ ] | -| 2.2.2 | TypeSpec-Project-Sync.ps1 | EngSys | [ ] | | [ ] | -| 2.2.3 | TypeSpec-Project-Generate.ps1 | EngSys | [ ] | | [ ] | -| 2.2.3.1 | Generate-{Language}-New-Project-Scaffolding | SDK owner | [ ] | | [ ] | -| 2.3 | Language script to call common script | SDK owner | [] | [] -| 2.4 | Update dotnet build target | Michael, Crystal | [] | [] - -#### 3. Inner Dev loop SDK generation local scenario -##### Flowchart -```mermaid -flowchart TD; - classDef highlight fill:#ffd700 - User((::))-->A - A - A["clone spec repo and clone sdk repo"]-->B - B["... iterate on .tsp specs"]-->C - C["tsp compile ."]-->D - D["optional:copy all related files to spec repo folder if it's not there - (*.tsp,*.json,tspconfig.yaml)"]-->E - D-->F - E["create API spec PR"]-->|loop|B - F["docker run - (a. call `initScript` - 2.1 - b. 
call `generateScript` - (call 2.2) )"]-->I - D-->K - K["optional:2.1"]-->G - G["2.2"]-->I - I["build code and work on test,sample,readme,etc."]-->J - I-->|loop|B - J["create sdk PR"] - class F,G,H highlight -``` -- Note: docker run command is - ``` - docker run -it --privileged -v {local_spec_repo_path}:/spec-repo -v {local_work_folder}:/work-dir -v sdkgeneration.azurecr.io/sdk-generation:latest --typespec-project={relative_typespec_project} --sdk={sdk_to_generate} - ``` -##### Remaining Tasks -| Step | Step Detail | Assignee | Implemented | Verified | -|--|--|--|--:|--:| - - -##### Details & Open questions -Dev Inner Loop is defined as what spec/sdk developer does locally on the dev box. - -Use case and design proposals -1. Optional: Create a TypeSpec self installer/package that will install all prerequisits such as node, npm, and TypeSpec -2. Spec writers [create recommended folder structures](https://github.com/Azure/azure-rest-api-specs/blob/main/documentation/typespec-structure-guidelines.md) under local clone of `azure-rest-api-specs` repo. -3. In the correct folder, run `tsp init https://aka.ms/typespec/azure` to create TypeSpec project: ARM or DP. Project will not contain package.json but rather rely on the one in the root level. -4. Author TypeSpec files and run `tsp compile .` to verify no errors in the TypeSpec project. It should only run `typespec-autorest` given that is the only emitter enabled via init template project. Genereated swagger would be placed in the appropriate folder. It is not expected to use --emit flag to run any SDK emitters. For that, see step 5. -5. Having a script to call OAV to generate swagger examples. -6. Optionally, after successful compile of TypeSpec project, spec author is able to run simple docker command to generate SDK for particular lanaguages locally. -7. Optionally, sdk developer can work on sdk sample, sdk test, readme, etc., locally. 
- -#### 4.Dev Outer loop - -Dev Outer Loop is defined as the experience of Spec writers sherparding the TypeSpec documents thru `azure-rest-api-sepcs` and its other depedency repos/branches like `-pr`, `-pr/RPSaasMain`, and `-pr/RPSaaSDev`, to different lanaguge SDK repos. - -The experience should allow as much validations can be verified/debugged locally to ensure a clean CI for simple and fast PR reviews. - -##### Use case -- With the correct inner loop setup, the spec writer should be able to submit all relevant files as PR to `azure-rest-api-specs-pr` repo or `azure-rest-api-specs` repo -- CI pipeline should verify TypeSpec compliation -- CI Pipeline should be able to generate key language SDKs (.net,java,js,python) -- Optionally CI generated key lanauge SDKs changes can be used to directly create PR on SDK public repo -- Optionally user can add customized code to the working branch created by CI pipeline. - -##### 4.1 Outer Dev loop azure-rest-api-specs pipeline -###### Flowchart -```mermaid -flowchart TD; - classDef highlight fill:#ffd700 - Pipeline((::))-->A -A["filter SDK languages to be generated -(from tspconfig.yaml)"]-->B -B["get language scripts path for `initScript` and `generateScript` -(from codegen_to_sdk_config.json)"]-->C -C["run `initScript` -(2.1)"]-->G -subgraph D["run `generateScript`"] - G -end -G["2.2"]-->I -I["package sdk code"]-->J -J["optional:build code and run test"]-->K -K["upload artifacts"]-->L -L["create sdk PR or update existing PR"]-->M -M["generate apiView"] -``` - -###### Details & Open questions - -- Step of "run `initScript`": - - output: a string map of environment variables to be set in following scripts. 
- -- Step of "run `generateScript`": - - input: [GenerateInputSchema](https://github.com/Azure/azure-rest-api-specs/blob/main/documentation/sdkautomation/GenerateInputSchema.json) - - specFolder - - headSha - - repoUrl - - relatedTypeSpecProjectFolder - - - output: [GenerateOutputSchema](https://github.com/Azure/azure-rest-api-specs/blob/main/documentation/sdkautomation/GenerateOutputSchema.json) - - packageName - - result - - path - - changelog - - artifacts - -##### 4.2 Outer Dev loop SDK repo pipeline -###### Flowchart -```mermaid -flowchart TD; - classDef manualStep fill:#ffd700 - classDef automationStep fill:#7de188 - Pipeline((::))-->A -A["run `initScript` -(2.1)"]-->C -subgraph B["run `generateScript`"] - C - E - F - G -end -C["call 2.2"]-->E -E["package sdk code"]-->F -F["build code"]-->G -G["run test"] -``` -###### Details & Open questions - -#### 4. Outer loop Overall Status and tracking - -CI pipeline implementation - - [x] 4.1 TypeSpec validation components - - [x] compile - - [ ] tspconfig validation (TBA?) - - [ ] breaking change detection (TBA?) - - [x] generated swagger validation - - [x] 4.2 TypeSpec SDK generation components - - [x] code generation - - [ ] code build and test (not all languages have) - - [x] package generation - - [x] API view generation - -###### Remaining Tasks -| Step | Step Detail | Assignee | Implemented | Verified | -|--|--|--|--:|--:| -| 4.1 | Validation Component | | [ ] | [ ] | -| 4.2 | SDK generation Component | | [ ] | | [ ] | diff --git a/documentation/typespec-rest-api-dev-process.md b/documentation/typespec-rest-api-dev-process.md index cdd5dfe7f86e..b20b12a28530 100644 --- a/documentation/typespec-rest-api-dev-process.md +++ b/documentation/typespec-rest-api-dev-process.md @@ -6,52 +6,61 @@ 2. Repo setup & prerequisites 3. Creating a new TypeSpec project 4. Prepare and submit a Pull Request for reviewing -5. Generate or Refresh SDK code from a TypeSpec project +5. 
Generate SDK code from a TypeSpec project ### 1. Introduction -This document describes the processes of developing Azure Rest APIs and SDKs with TypeSpec language. The steps below assumes that you are developing TypeSpec API specifications in the `azure-rest-api-specs` and `azure-rest-api-specs-pr`repos. +This document describes the processes of developing Azure Rest APIs and SDKs with TypeSpec language. The steps below + assume that you are developing TypeSpec API specifications in the `azure-rest-api-specs` and `azure-rest-api-specs-pr` repos. -If you are developing within your own ADO repo first and then submitting into `azure-rest-api-specs` repos for review and release, you will need to copy the TypeSpec files over. +If you are developing within your own ADO repo first and then submitting into `azure-rest-api-specs` repos for review and + release, you will need to copy the TypeSpec files over. ### 2. Repo setup & prerequisites -- The main repos for Azure rest-api are [azure-rest-api-specs](https://github.com/azure/azure-rest-api-specs) and [azure-rest-api-specs-pr](https://github.com/azure/azure-rest-api-specs-pr) repos. The `-pr` repo contains `RPSaaSMaster` and `RPSaaSDev` branches for ProviderHub based ARM service specs. +- The main repos for Azure rest-api are [azure-rest-api-specs](https://github.com/azure/azure-rest-api-specs) and [azure-rest-api-specs-pr](https://github.com/azure/azure-rest-api-specs-pr) + repos. The `-pr` repo contains `RPSaaSMaster` and `RPSaaSDev` branches for ProviderHub based ARM service specs. #### 2.1 With local machine development -- [Node.js LTS](https://nodejs.org/en) version 18 or above (LTS Recommended). Ensure you can run the npm command in a command prompt: -``` - npm --version -``` +- [Node.js LTS](https://nodejs.org/en) version 18 or above (LTS Recommended). 
+ Ensure you can run the npm command in a command prompt: -- Run following command in the **repository root folder**. This will install required packages such as TypeSpec compilers and Azure Library packages. + ```npm + npm --version + ``` -``` - npm ci -``` +- Run following command in the **repository root folder**. This will install required packages such as TypeSpec + compilers and Azure Library packages. + + ```npm + npm ci + ``` - Ensure you can run TypeSpec command within the repo folders. -``` - npx tsp --version -``` + ```npm + npx tsp --version + ``` - One-time set up: Install TypeSpec VisualStudio or VSCode extensions to get syntex highlighting, tool tips in IDE: -``` - npx tsp code install -``` -OR -``` - npx tsp vs install -``` + ```npm + npx tsp code install + ``` + + OR + + ```npm + npx tsp vs install + ``` #### 2.2 VSCode with local docker .devcontainer -All prerequisites have been installed in the dev container. You should to have `Docker Desktop` and `WSL2` running if you are on Windows machine. +All prerequisites have been installed in the dev container. You should have `Docker Desktop` and `WSL2` running if + you are on Windows machine. -To start, you just need to install `Dev Containers` VS code extension, then open the repo path. +To start, you just need to install `Dev Containers` VS code extension, then open the repo path. - VSCode will detect the .devcontainer and prompt you to reopen the workspace. @@ -61,10 +70,11 @@ Once VSCode reopened in Container, you can run any of the program below in the V #### 2.2 VSCode in browser via github codespaces -Github codespaces leverage the same dev container in the repo. The difference is it is hosted in cloud with VSCode in browser. - -To start, you just need to browse to the `azure-rest-api-specs` repo, select `<> Code` drop down and follow `Codespaces` instructions. +Github codespaces leverage the same dev container in the repo. The difference is it is hosted in cloud with VSCode in + browser. 
+To start, you just need to browse to the `azure-rest-api-specs` repo, select `<> Code` drop down and follow `Codespaces` + instructions. ### 3. Creating a new TypeSpec project @@ -74,18 +84,21 @@ Please first review recommended folder structure detailed in [this document](htt 2. Create your service component folder under the service folder. For example, `Sphere.Management` or `Azure.OpenAI`. 3. Create a new TypeSpec project based on Azure template with command: -```cli - npx tsp init https://aka.ms/typespec/azure-init -``` + ```cli + npx tsp init https://aka.ms/typespec/azure-init + ``` + 4. Select `(rest-api-spec repo) ARM` or `(rest-api-spec repo) Data-plane` and answer appropriate naming questions. 5. Compile the generated TypeSpec project with command: -```cli - npx tsp compile . -``` - The generated swagger files should be correctly placed in `data-plane` or `resource-manager` folders follwoing the naming conventions. + ```cli + npx tsp compile . + ``` -5. Now the project has been set up. You can modify the sample and develop your own APIs with TypeSpec + The generated swagger files should be correctly placed in `data-plane` or `resource-manager` folders following the + naming conventions. + +6. Now the project has been set up. You can modify the sample and develop your own APIs with TypeSpec. ### 4. Prepare and submit a Pull Request for reviewing @@ -97,33 +110,35 @@ Please first review recommended folder structure detailed in [this document](htt The [oav](https://github.com/Azure/oav) provides two ways to generate Swagger examples: - 1. Generating basic examples and then manually modify the values. It will generate two examples for each operation: one contains minimal properties set, the other contains the maximal properties set. Since the auto-generated examples consist of random values for most types, you need replace them with meaningful values. - - ```bash - oav generate-examples openapi.json - ``` + 1. 
Generating basic examples and then manually modify the values. It will generate two examples for each operation: + one contains minimal properties set, the other contains the maximal properties set. Since the auto-generated + examples consist of random values for most types, you need replace them with meaningful values. - 2. (**Recommended**) Generating high quality examples from API Scenario test. Refer to [API Test section](getstarted/providerhub/step03-api-testing.md). It will validate the API quality and generate Swagger examples from live traffic in API Scenario test. + ```bash + oav generate-examples openapi.json + ``` - ```bash - oav run --spec --generateExample - ``` - - Note, latest OAV tool should automatically generate the following. However, if you are generating the examples manually, please ensure you have: + Note, latest OAV tool should automatically generate the following. However, if you are generating the examples manually, + please ensure you have: - include `title` field and make sure it is descriptive and unique for each operation. - include `operationId`. This is used to match with declared operations in TypeSpec and correctly output in swagger. -4. Add/update the `readme.md` file in either the 'resource-manager' or 'data-plane' folder to specify the version and location of the OpenAPI files. The `readme.md` is needed for both management-plane and data-plane services for REST API Docs generation. For management-plane services, the `readme.md` is also needed for SDK generation -- see [generating client with autorest](https://github.com/Azure/autorest/blob/main/docs/generate/readme.md#keeping-your-options-in-one-place-the-preferred-option). The `readme.md` may contain generation options for multiple languages, separated into high-level sections. +4. Add/update the `readme.md` file in either the 'resource-manager' or 'data-plane' folder to specify the version and + location of the OpenAPI files. 
The `readme.md` is needed for both management-plane and data-plane services for + REST API Docs generation. Example:[sample-readme](https://github.com/Azure/azure-rest-api-specs/blob/main/documentation/samplefiles/samplereadme.md) 5. Generate swagger files: - sync with the target branch in the azure-rest-api-specs repo - ``` + + ```git git pull upstream ``` + - in the root directory, run `npm install` - - in the project directory, `npx tsp compile`. This will generate swagger files under `resource-manager` or `data-plane` folders. + - in the project directory, `npx tsp compile`. This will generate swagger files under `resource-manager` or + `data-plane` folders. 6. Ensure all generated files under `resource-manager` or `data-plane` have been added to PR. @@ -140,38 +155,10 @@ Please first review recommended folder structure detailed in [this document](htt The CI checks result will be commented on the PR. you can refer to the [CI fix Guide](https://github.com/Azure/azure-rest-api-specs/blob/main/documentation/ci-fix.md). Note: -Since the OpenAPI is generated from TypeSpec, to change the OpenAPI, you must update the TypeSpec file and regenerate the OpenAPI and avoid updating OpenAPI directly to keep the consistency between OpenAPI and TypeSpec. -For support & help, you can post a message to [TypeSpec parters - teams channel](https://teams.microsoft.com/l/channel/19%3a2d4efc54d99e4d00a568da7cf0643c1b%40thread.skype/TypeSpec%2520Partners?groupId=3e17dcb0-4257-4a30-b843-77f47f1d4121&tenantId=72f988bf-86f1-41af-91ab-2d7cd011db47). - -### 5. Generate or Refresh SDK code from a TypeSpec project - -The section describe the process for data-plane SDKs. Management-plane SDKs still follow separate existing `autorest` process. - -This assumes you have cloned language SDKs into your local folder at same level of `azure-rest-api-spec`. 
-``` - \- - azure-rest-api-specs/ - azure-rest-api-specs-pr/ - azure-sdk-for-java/ - azure-sdk-for-js/ - azure-sdk-for-net/ - azure-sdk-for-python/ -`````` - - You can then run `./eng/script/TypeSpec-Generate-Sdk.ps1` script to generate the necessary SDK folder and project structure if it does not already exist, and then regenerate the SDK source code. - -Scenarios: - -1. Test generation of SDK project and code with local TypeSpec changes in your `azure-rest-api-specs` repo. Please note this cannot be used to submit a SDK PR as it does not contain a valid commit id for the TypeSpec file. - -```cli - cd specifications/contoso/contoso.widgetmanager - ..\..\..\eng\script\TypeSpec-Generate-Sdk.ps1 -SdkRepoRootDirectory C:\Github\fork\azure-sdk-for-java\ -``` +Since the OpenAPI is generated from TypeSpec, to change the OpenAPI, you must update the TypeSpec file and regenerate the + OpenAPI and avoid updating OpenAPI directly to keep the consistency between OpenAPI and TypeSpec. +For support & help, you can post a message to [TypeSpec Discussion - teams channel](https://teams.microsoft.com/l/channel/19%3A906c1efbbec54dc8949ac736633e6bdf%40thread.skype/TypeSpec%20Discussion?groupId=3e17dcb0-4257-4a30-b843-77f47f1d4121&tenantId=72f988bf-86f1-41af-91ab-2d7cd011db47). -2. Generate/refresh SDK code for PR submission. The following command will update tsp-location.yaml with commit id and repo info so the build can be generated and verified by the CI and release pipeline. +### 5. Generate SDK code from a TypeSpec project -``` - cd specifications/contoso/contoso.widgetmanager - ..\..\..\eng\script\TypeSpec-Generate-Sdk.ps1 -SdkRepoRootDirectory C:\Github\fork\azure-sdk-for-java\ [commit id] Azure/azure-rest-api-specs -``` +Refer to [Develop client libraries](https://eng.ms/docs/products/azure-developer-experience/develop/sdk-develop?tabs=management) for more details. 
diff --git a/documentation/typespec-structure-guidelines.md b/documentation/typespec-structure-guidelines.md index f7ff9359f922..ae2f9e0b5294 100644 --- a/documentation/typespec-structure-guidelines.md +++ b/documentation/typespec-structure-guidelines.md @@ -79,9 +79,13 @@ To differentiate between folders defining a service, an SDK, or both, refer to t Services should **not** have a `package.json` directly in the TypeSpec project directory. Instead, they should use the `package.json` in the root directory of the repo for installing any required dependencies. This root-level `package.json` should only depend on the `@azure-tools/typespec-autorest` and `@azure-tools/typespec-apiview` emitters. -SDK language-specific emitters won't have direct dependencies in the spec repo. Instead, they will come from the language SDK repo itself for generation. For more information, see the example [emitter-package.json](https://github.com/Azure/azure-sdk-for-net/blob/main/eng/emitter-package.json) and [TypeSpec-Project-Scripts](https://github.com/Azure/azure-sdk-tools/blob/main/doc/common/TypeSpec-Project-Scripts.md). +SDK language-specific emitters won't have direct dependencies in the spec repo. Instead, they will come from the language SDK repo itself for generation. For more information, see the example [emitter-package.json](https://github.com/Azure/azure-sdk-for-net/blob/main/eng/emitter-package.json). -Services aiming to generate an SDK need to provide emitter configuration for all the SDK emitters in the `tspconfig.yaml` file. See this [example](..\specification\contosowidgetmanager\Contoso.WidgetManager\tspconfig.yaml). +Services aiming to generate an SDK need to provide emitter configuration for all the SDK emitters in + the `tspconfig.yaml` file. 
See the following samples for more details: + +- [management plane sample](https://aka.ms/azsdk/tspconfig-sample-mpg) +- [data plane sample](https://aka.ms/azsdk/tspconfig-sample-dpg) ## Libraries for service groups diff --git a/eng/common/TestResources/New-TestResources.ps1 b/eng/common/TestResources/New-TestResources.ps1 index ca2d33fe98ff..078991250e64 100755 --- a/eng/common/TestResources/New-TestResources.ps1 +++ b/eng/common/TestResources/New-TestResources.ps1 @@ -18,7 +18,7 @@ param ( [ValidatePattern('^[-\w\._\(\)]+$')] [string] $ResourceGroupName, - [Parameter(Mandatory = $true, Position = 0)] + [Parameter(Position = 0)] [string] $ServiceDirectory, [Parameter()] @@ -122,6 +122,7 @@ param ( $NewTestResourcesRemainingArguments ) +. (Join-Path $PSScriptRoot .. scripts common.ps1) . (Join-Path $PSScriptRoot .. scripts Helpers Resource-Helpers.ps1) . $PSScriptRoot/TestResources-Helpers.ps1 . $PSScriptRoot/SubConfig-Helpers.ps1 @@ -158,10 +159,13 @@ if ($initialContext) { # try..finally will also trap Ctrl+C. try { - # Enumerate test resources to deploy. Fail if none found. - $repositoryRoot = "$PSScriptRoot/../../.." | Resolve-Path - $root = [System.IO.Path]::Combine($repositoryRoot, "sdk", $ServiceDirectory) | Resolve-Path + $root = $repositoryRoot = "$PSScriptRoot/../../.." 
| Resolve-Path + + if($ServiceDirectory) { + $root = [System.IO.Path]::Combine($repositoryRoot, "sdk", $ServiceDirectory) | Resolve-Path + } + if ($TestResourcesDirectory) { $root = $TestResourcesDirectory | Resolve-Path # Add an explicit check below in case ErrorActionPreference is overridden and Resolve-Path doesn't stop execution @@ -170,6 +174,7 @@ try { } Write-Verbose "Overriding test resources search directory to '$root'" } + $templateFiles = @() "$ResourceType-resources.json", "$ResourceType-resources.bicep" | ForEach-Object { @@ -191,7 +196,12 @@ try { exit } + # returns empty string if $ServiceDirectory is not set $serviceName = GetServiceLeafDirectoryName $ServiceDirectory + + # in ci, random names are used + # in non-ci, without BaseName, ResourceGroupName or ServiceDirectory, all invocations will + # generate the same resource group name and base name for a given user $BaseName, $ResourceGroupName = GetBaseAndResourceGroupNames ` -baseNameDefault $BaseName ` -resourceGroupNameDefault $ResourceGroupName ` @@ -199,27 +209,18 @@ try { -serviceDirectoryName $serviceName ` -CI $CI - if ($wellKnownTMETenants.Contains($TenantId)) { - # Add a prefix to the resource group name to avoid flagging the usages of local auth - # See details at https://eng.ms/docs/products/onecert-certificates-key-vault-and-dsms/key-vault-dsms/certandsecretmngmt/credfreefaqs#how-can-i-disable-s360-reporting-when-testing-customer-facing-3p-features-that-depend-on-use-of-unsafe-local-auth - $ResourceGroupName = "SSS3PT_" + $ResourceGroupName - } - - if ($ResourceGroupName.Length -gt 90) { - # See limits at https://docs.microsoft.com/azure/architecture/best-practices/resource-naming - Write-Warning -Message "Resource group name '$ResourceGroupName' is too long. So pruning it to be the first 90 characters." - $ResourceGroupName = $ResourceGroupName.Substring(0, 90) - } - # Make sure pre- and post-scripts are passed formerly required arguments. 
$PSBoundParameters['BaseName'] = $BaseName # Try detecting repos that support OutFile and defaulting to it - if (!$CI -and !$PSBoundParameters.ContainsKey('OutFile') -and $IsWindows) { + if (!$CI -and !$PSBoundParameters.ContainsKey('OutFile')) { # TODO: find a better way to detect the language - if (Test-Path "$repositoryRoot/eng/service.proj") { + if ($IsWindows -and $Language -eq 'dotnet') { $OutFile = $true - Log "Detected .NET repository. Defaulting OutFile to true. Test environment settings would be stored into the file so you don't need to set environment variables manually." + Log "Detected .NET repository. Defaulting OutFile to true. Test environment settings will be stored into a file so you don't need to set environment variables manually." + } elseif ($SupportsTestResourcesDotenv) { + $OutFile = $true + Log "Repository supports reading .env files. Defaulting OutFile to true. Test environment settings may be stored in a .env file so they are read by tests automatically." } } @@ -304,6 +305,19 @@ try { } } + # This needs to happen after we set the TenantId but before we use the ResourceGroupName + if ($wellKnownTMETenants.Contains($TenantId)) { + # Add a prefix to the resource group name to avoid flagging the usages of local auth + # See details at https://eng.ms/docs/products/onecert-certificates-key-vault-and-dsms/key-vault-dsms/certandsecretmngmt/credfreefaqs#how-can-i-disable-s360-reporting-when-testing-customer-facing-3p-features-that-depend-on-use-of-unsafe-local-auth + $ResourceGroupName = "SSS3PT_" + $ResourceGroupName + } + + if ($ResourceGroupName.Length -gt 90) { + # See limits at https://docs.microsoft.com/azure/architecture/best-practices/resource-naming + Write-Warning -Message "Resource group name '$ResourceGroupName' is too long. So pruning it to be the first 90 characters." 
+ $ResourceGroupName = $ResourceGroupName.Substring(0, 90) + } + # If a provisioner service principal was provided log into it to perform the pre- and post-scripts and deployments. if ($ProvisionerApplicationId -and $ServicePrincipalAuth) { $null = Disable-AzContextAutosave -Scope Process @@ -341,10 +355,10 @@ try { if ($context.Account.Type -eq 'User') { # Support corp tenant and TME tenant user id lookups $user = Get-AzADUser -Mail $context.Account.Id - if ($user -eq $null -or !$user.Id) { + if ($null -eq $user -or !$user.Id) { $user = Get-AzADUser -UserPrincipalName $context.Account.Id } - if ($user -eq $null -or !$user.Id) { + if ($null -eq $user -or !$user.Id) { throw "Failed to find entra object ID for the current user" } $ProvisionerApplicationOid = $user.Id @@ -359,9 +373,10 @@ try { $ProvisionerApplicationOid = $sp.Id } - $tags = @{ - Owners = (GetUserName) - ServiceDirectory = $ServiceDirectory + $tags = @{ Owners = (GetUserName) } + + if ($ServiceDirectory) { + $tags['ServiceDirectory'] = $ServiceDirectory } # Tag the resource group to be deleted after a certain number of hours. 
@@ -418,10 +433,10 @@ try { # Support corp tenant and TME tenant user id lookups $userAccount = (Get-AzADUser -Mail (Get-AzContext).Account.Id) - if ($userAccount -eq $null -or !$userAccount.Id) { + if ($null -eq $userAccount -or !$userAccount.Id) { $userAccount = (Get-AzADUser -UserPrincipalName (Get-AzContext).Account) } - if ($userAccount -eq $null -or !$userAccount.Id) { + if ($null -eq $userAccount -or !$userAccount.Id) { throw "Failed to find entra object ID for the current user" } $TestApplicationOid = $userAccount.Id @@ -657,6 +672,7 @@ $serialized '`@ | ConvertFrom-Json -AsHashtable # Set global variables that aren't always passed as parameters `$ResourceGroupName = `$parameters.ResourceGroupName +`$AdditionalParameters = `$parameters.AdditionalParameters `$DeploymentOutputs = `$parameters.DeploymentOutputs $postDeploymentScript `@parameters "@ @@ -858,14 +874,19 @@ Force creation of resources instead of being prompted. .PARAMETER OutFile Save test environment settings into a .env file next to test resources template. -The contents of the file are protected via the .NET Data Protection API (DPAPI). -This is supported only on Windows. The environment file is scoped to the current -service directory. +On Windows in the Azure/azure-sdk-for-net repository, +the contents of the file are protected via the .NET Data Protection API (DPAPI). +The environment file is scoped to the current service directory. The environment file will be named for the test resources template that it was generated for. For ARM templates, it will be test-resources.json.env. For Bicep templates, test-resources.bicep.env. +If `$SupportsTestResourcesDotenv=$true` in language repos' `LanguageSettings.ps1`, +and if `.env` files are gitignore'd, and if a service directory's `test-resources.bicep` +file does not expose secrets based on `bicep lint`, a `.env` file is written next to +`test-resources.bicep` that can be loaded by a test harness to be used for recording tests. 
+ .PARAMETER SuppressVsoCommands By default, the -CI parameter will print out secrets to logs with Azure Pipelines log commands that cause them to be redacted. For CI environments that don't support this (like diff --git a/eng/common/TestResources/New-TestResources.ps1.md b/eng/common/TestResources/New-TestResources.ps1.md index f44feb1ab432..6b479f23fa0b 100644 --- a/eng/common/TestResources/New-TestResources.ps1.md +++ b/eng/common/TestResources/New-TestResources.ps1.md @@ -588,17 +588,19 @@ Accept wildcard characters: False ### -OutFile Save test environment settings into a .env file next to test resources template. -The contents of the file are protected via the .NET Data Protection API (DPAPI). -This is supported only on Windows. -The environment file is scoped to the current -service directory. +On Windows in the Azure/azure-sdk-for-net repository, +the contents of the file are protected via the .NET Data Protection API (DPAPI). +The environment file is scoped to the current service directory. The environment file will be named for the test resources template that it was -generated for. -For ARM templates, it will be test-resources.json.env. -For +generated for. For ARM templates, it will be test-resources.json.env. For Bicep templates, test-resources.bicep.env. +If `$SupportsTestResourcesDotenv=$true` in language repos' `LanguageSettings.ps1`, +and if `.env` files are gitignore'd, and if a service directory's `test-resources.bicep` +file does not expose secrets based on `bicep lint`, a `.env` file is written next to +`test-resources.bicep` that can be loaded by a test harness to be used for recording tests. 
+ ```yaml Type: SwitchParameter Parameter Sets: (All) diff --git a/eng/common/TestResources/README.md b/eng/common/TestResources/README.md index b63307e24530..8b801963a502 100644 --- a/eng/common/TestResources/README.md +++ b/eng/common/TestResources/README.md @@ -1,7 +1,7 @@ # Live Test Resource Management Running and recording live tests often requires first creating some resources -in Azure. Service directories that include a `test-resources.json` or `test-resources.bicep` +in Azure. Service directories that include a `test-resources.json` or `test-resources.bicep` file require running [New-TestResources.ps1][] to create these resources and output environment variables you must set. @@ -19,8 +19,8 @@ scenarios as well as on hosted agents for continuous integration testing. ## On the Desktop To set up your Azure account to run live tests, you'll need to log into Azure, -and create the resources defined in your `test-resources.json` or `test-resources.bicep` -template as shown in the following example using Azure Key Vault. The script will create +and create the resources defined in your `test-resources.json` or `test-resources.bicep` +template as shown in the following example using Azure Key Vault. The script will create a service principal automatically, or you may create a service principal that can be reused subsequently. @@ -34,12 +34,16 @@ Connect-AzAccount -Subscription 'YOUR SUBSCRIPTION ID' eng\common\TestResources\New-TestResources.ps1 keyvault ``` -The `OutFile` switch will be set by default if you are running this for a .NET project on Windows. -This will save test environment settings into a `test-resources.json.env` file next to `test-resources.json` +The `OutFile` switch will be set by default if you are running this for a .NET project on Windows. +This will save test environment settings into a `test-resources.json.env` file next to `test-resources.json` or a `test-resources.bicep.env` file next to `test-resources.bicep`. 
The file is protected via DPAPI. The environment file would be scoped to the current repository directory and avoids the need to set environment variables or restart your IDE to recognize them. +It will also be set by default for other repositories and on other platforms if your `assets.json` +file contains `"Dotenv": true`. It must be in your `.gitignore` file; +otherwise, an error is returned and no file is generated. + Along with some log messages, this will output environment variables based on your current shell like in the following example: diff --git a/eng/common/TestResources/Remove-TestResources.ps1 b/eng/common/TestResources/Remove-TestResources.ps1 index 12411c4ee2aa..232c86196c15 100755 --- a/eng/common/TestResources/Remove-TestResources.ps1 +++ b/eng/common/TestResources/Remove-TestResources.ps1 @@ -157,10 +157,6 @@ $context = Get-AzContext if (!$ResourceGroupName) { if ($CI) { - if (!$ServiceDirectory) { - Write-Warning "ServiceDirectory parameter is empty, nothing to remove" - exit 0 - } $envVarName = (BuildServiceDirectoryPrefix (GetServiceLeafDirectoryName $ServiceDirectory)) + "RESOURCE_GROUP" $ResourceGroupName = [Environment]::GetEnvironmentVariable($envVarName) if (!$ResourceGroupName) { @@ -221,7 +217,12 @@ if ($wellKnownSubscriptions.ContainsKey($subscriptionName)) { Log "Selected subscription '$subscriptionName'" if ($ServiceDirectory) { - $root = [System.IO.Path]::Combine("$PSScriptRoot/../../../sdk", $ServiceDirectory) | Resolve-Path + $root = "$PSScriptRoot/../../.." + if($ServiceDirectory) { + $root = "$root/sdk/$ServiceDirectory" + } + $root = $root | Resolve-Path + $preRemovalScript = Join-Path -Path $root -ChildPath "remove-$ResourceType-resources-pre.ps1" if (Test-Path $preRemovalScript) { Log "Invoking pre resource removal script '$preRemovalScript'" @@ -235,6 +236,7 @@ if ($ServiceDirectory) { # Make sure environment files from New-TestResources -OutFile are removed. 
Get-ChildItem -Path $root -Filter "$ResourceType-resources.json.env" -Recurse | Remove-Item -Force:$Force + Get-ChildItem -Path $root -Filter ".env" -Recurse -Force | Remove-Item -Force } $verifyDeleteScript = { diff --git a/eng/common/TestResources/SubConfig-Helpers.ps1 b/eng/common/TestResources/SubConfig-Helpers.ps1 index ab2344283bb3..061160d59f6c 100644 --- a/eng/common/TestResources/SubConfig-Helpers.ps1 +++ b/eng/common/TestResources/SubConfig-Helpers.ps1 @@ -1,4 +1,7 @@ function BuildServiceDirectoryPrefix([string]$serviceName) { + if(!$serviceName) { + return "" + } $serviceName = $serviceName -replace '[\./\\]', '_' return $serviceName.ToUpperInvariant() + "_" } @@ -32,10 +35,15 @@ function GetBaseAndResourceGroupNames( if ($CI) { $base = 't' + (New-Guid).ToString('n').Substring(0, 16) # Format the resource group name based on resource group naming recommendations and limitations. - $generatedGroup = "rg-{0}-$base" -f ($serviceName -replace '[\.\\\/:]', '-'). - Substring(0, [Math]::Min($serviceDirectoryName.Length, 90 - $base.Length - 4)). - Trim('-'). - ToLowerInvariant() + if ($serviceDirectoryName) { + $generatedGroup = "rg-{0}-$base" -f ($serviceName -replace '[\.\\\/:]', '-'). + Substring(0, [Math]::Min($serviceDirectoryName.Length, 90 - $base.Length - 4)). + Trim('-'). + ToLowerInvariant() + } else { + $generatedGroup = "rg-$base" + } + $group = $resourceGroupNameDefault ? 
$resourceGroupNameDefault : $generatedGroup Log "Generated resource base name '$base' and resource group name '$group' for CI build" diff --git a/eng/common/TestResources/TestResources-Helpers.ps1 b/eng/common/TestResources/TestResources-Helpers.ps1 index 400cafcefd5b..cbe047ebc5f1 100644 --- a/eng/common/TestResources/TestResources-Helpers.ps1 +++ b/eng/common/TestResources/TestResources-Helpers.ps1 @@ -129,8 +129,30 @@ function MergeHashes([hashtable] $source, [psvariable] $dest) { } } +function IsBicepInstalled() { + try { + bicep --version | Out-Null + return $LASTEXITCODE -eq 0 + } + catch { + return $false + } +} + +function IsAzCliBicepInstalled() { + try { + az bicep version | Out-Null + return $LASTEXITCODE -eq 0 + } + catch { + return $false + } +} + function BuildBicepFile([System.IO.FileSystemInfo] $file) { - if (!(Get-Command bicep -ErrorAction Ignore)) { + $useBicepCli = IsBicepInstalled + + if (!$useBicepCli -and !(IsAzCliBicepInstalled)) { Write-Error "A bicep file was found at '$($file.FullName)' but the Azure Bicep CLI is not installed. See https://aka.ms/bicep-install" throw } @@ -140,7 +162,12 @@ function BuildBicepFile([System.IO.FileSystemInfo] $file) { # Az can deploy bicep files natively, but by compiling here it becomes easier to parse the # outputted json for mismatched parameter declarations. - bicep build $file.FullName --outfile $templateFilePath + if ($useBicepCli) { + bicep build $file.FullName --outfile $templateFilePath + } else { + az bicep build --file $file.FullName --outfile $templateFilePath + } + if ($LASTEXITCODE) { Write-Error "Failure building bicep file '$($file.FullName)'" throw @@ -149,6 +176,42 @@ function BuildBicepFile([System.IO.FileSystemInfo] $file) { return $templateFilePath } +function LintBicepFile([string] $path) { + $useBicepCli = IsBicepInstalled + + if (!$useBicepCli -and !(IsAzCliBicepInstalled)) { + Write-Error "A bicep file was found at '$path' but the Azure Bicep CLI is not installed. 
See https://aka.ms/bicep-install" + throw + } + + # Work around lack of config file override: https://github.com/Azure/bicep/issues/5013 + # Capture lint output so warnings can be scanned for secret-exposing outputs below. + + if ($useBicepCli) { + $output = bicep lint $path 2>&1 + } else { + $output = az bicep lint --file $path 2>&1 + } + + if ($LASTEXITCODE) { + Write-Error "Failed linting bicep file '$path'" + throw + } + + $clean = $true + foreach ($line in $output) { + $line = $line.ToString() + + # See https://learn.microsoft.com/azure/azure-resource-manager/bicep/bicep-config-linter for lints. + if ($line.Contains('outputs-should-not-contain-secrets')) { + $clean = $false + } + Write-Warning $line + } + + $clean +} + function BuildDeploymentOutputs([string]$serviceName, [object]$azContext, [object]$deployment, [hashtable]$environmentVariables) { $serviceDirectoryPrefix = BuildServiceDirectoryPrefix $serviceName # Add default values @@ -203,19 +266,40 @@ function SetDeploymentOutputs( $deploymentOutputs = BuildDeploymentOutputs $serviceName $azContext $deployment $deploymentEnvironmentVariables if ($OutFile) { - if (!$IsWindows) { - Write-Host 'File option is supported only on Windows' - } + if ($IsWindows -and $Language -eq 'dotnet') { + $outputFile = "$($templateFile.originalFilePath).env" + + $environmentText = $deploymentOutputs | ConvertTo-Json; + $bytes = [System.Text.Encoding]::UTF8.GetBytes($environmentText) + $protectedBytes = [Security.Cryptography.ProtectedData]::Protect($bytes, $null, [Security.Cryptography.DataProtectionScope]::CurrentUser) - $outputFile = "$($templateFile.originalFilePath).env" + Set-Content $outputFile -Value $protectedBytes -AsByteStream -Force - $environmentText = $deploymentOutputs | ConvertTo-Json; - $bytes = [System.Text.Encoding]::UTF8.GetBytes($environmentText) - $protectedBytes = [Security.Cryptography.ProtectedData]::Protect($bytes, $null, [Security.Cryptography.DataProtectionScope]::CurrentUser) + Write-Host "Test environment 
settings`n$environmentText`nstored into encrypted $outputFile" + } + elseif ($templateFile.originalFilePath -and $templateFile.originalFilePath.EndsWith(".bicep")) { + $bicepTemplateFile = $templateFile.originalFilePath + + # Make sure the file would not write secrets to .env file. + if (!(LintBicepFile $bicepTemplateFile)) { + Write-Error "$bicepTemplateFile may write secrets. No file written." + } + $outputFile = $bicepTemplateFile | Split-Path | Join-Path -ChildPath '.env' - Set-Content $outputFile -Value $protectedBytes -AsByteStream -Force + # Make sure the file would be ignored. + git check-ignore -- "$outputFile" > $null + if ($?) { + $environmentText = foreach ($kv in $deploymentOutputs.GetEnumerator()) { + "$($kv.Key)=`"$($kv.Value)`"" + } - Write-Host "Test environment settings`n $environmentText`nstored into encrypted $outputFile" + Set-Content $outputFile -Value $environmentText -Force + Write-Host "Test environment settings`n$environmentText`nstored in $outputFile" + } + else { + Write-Error "$outputFile is not ignored by .gitignore. No file written." 
+ } + } } else { if (!$CI) { diff --git a/eng/common/TestResources/deploy-test-resources.yml b/eng/common/TestResources/deploy-test-resources.yml index 9a4887622d4c..30efe36e231c 100644 --- a/eng/common/TestResources/deploy-test-resources.yml +++ b/eng/common/TestResources/deploy-test-resources.yml @@ -1,5 +1,6 @@ parameters: - ServiceDirectory: not-set + ServiceDirectory: '' + TestResourcesDirectory: '' ArmTemplateParameters: '@{}' DeleteAfterHours: 8 Location: '' @@ -98,6 +99,7 @@ steps: eng/common/TestResources/New-TestResources.ps1 ` -ResourceType '${{ parameters.ResourceType }}' ` -ServiceDirectory '${{ parameters.ServiceDirectory }}' ` + -TestResourcesDirectory '${{ parameters.TestResourcesDirectory }}' ` -Location '${{ parameters.Location }}' ` -DeleteAfterHours '${{ parameters.DeleteAfterHours }}' ` @subscriptionConfiguration ` @@ -142,6 +144,7 @@ steps: eng/common/TestResources/New-TestResources.ps1 ` -ResourceType '${{ parameters.ResourceType }}' ` -ServiceDirectory '${{ parameters.ServiceDirectory }}' ` + -TestResourcesDirectory '${{ parameters.TestResourcesDirectory }}' ` -Location '${{ parameters.Location }}' ` -DeleteAfterHours '${{ parameters.DeleteAfterHours }}' ` @subscriptionConfiguration ` diff --git a/eng/common/mcp/README.md b/eng/common/mcp/README.md new file mode 100644 index 000000000000..4a7c28e46b55 --- /dev/null +++ b/eng/common/mcp/README.md @@ -0,0 +1,38 @@ +# Azure SDK MCP Servers + +This document details how to author, publish and use [MCP servers](https://github.com/modelcontextprotocol) for azure sdk team usage. + +## Using the Azure SDK MCP Server + +Run the below command to download and run the azure sdk engsys mcp server manually: + +``` +/eng/common/mcp/azure-sdk-mcp.ps1 -Run +``` + +To install the mcp server for use within vscode copilot agent mode, run the following then launch vscode from the repository root. 
+ + ``` + /eng/common/mcp/azure-sdk-mcp.ps1 -UpdateVsCodeConfig + ``` + + *When updating the config the script will not overwrite any other server configs.* + + The script will install the latest version of the azsdk cli executable from [tools releases](https://github.com/Azure/azure-sdk-tools/releases) and install it to `$HOME/.azure-sdk-mcp/azsdk`. + + ## Authoring an MCP server + + Azure SDK MCP server code is in [azure-sdk-tools/tools/azsdk-cli/Azure.Sdk.Tools.Cli](https://github.com/Azure/azure-sdk-tools/tree/main/tools/azsdk-cli/Azure.Sdk.Tools.Cli). + + Azure SDK MCP servers should support [stdio and sse transports](https://modelcontextprotocol.io/docs/concepts/transports#server-sent-events-sse). + + When running in copilot the default is stdio mode, but SSE is useful to support for external debugging. + + ### Developing MCP servers in C# + + See the [C# MCP SDK](https://github.com/modelcontextprotocol/csharp-sdk) + + Add an [SSE transport](https://github.com/modelcontextprotocol/csharp-sdk/tree/main/samples/AspNetCoreSseServer) + + TODO: Add the azsdk-cli project to pull in MCP server dependencies from the repo + diff --git a/eng/common/mcp/azure-sdk-mcp.ps1 b/eng/common/mcp/azure-sdk-mcp.ps1 new file mode 100755 index 000000000000..b56fb4e96a28 --- /dev/null +++ b/eng/common/mcp/azure-sdk-mcp.ps1 @@ -0,0 +1,66 @@ +#!/usr/bin/env pwsh + +param( + [string]$FileName = 'Azure.Sdk.Tools.Cli', + [string]$Package = 'azsdk', + [string]$Version, # Default to latest + [string]$InstallDirectory = '', + [string]$Repository = 'Azure/azure-sdk-tools', + [string]$RunDirectory = (Resolve-Path (Join-Path $PSScriptRoot .. .. ..)), + [switch]$Run, + [switch]$UpdateVsCodeConfig, + [switch]$Clean +) + +$ErrorActionPreference = "Stop" + +if (-not $InstallDirectory) +{ + $homeDir = if ($env:HOME) { $env:HOME } else { $env:USERPROFILE } + $InstallDirectory = (Join-Path $homeDir ".azure-sdk-mcp" "azsdk") +} +. (Join-Path $PSScriptRoot '..' 
'scripts' 'Helpers' 'AzSdkTool-Helpers.ps1') + +if ($Clean) { + Clear-Directory -Path $InstallDirectory +} + +if ($UpdateVsCodeConfig) { + $vscodeConfigPath = Join-Path $PSScriptRoot ".." ".." ".." ".vscode" "mcp.json" + if (Test-Path $vscodeConfigPath) { + $vscodeConfig = Get-Content -Raw $vscodeConfigPath | ConvertFrom-Json -AsHashtable + } + else { + $vscodeConfig = @{} + } + $serverKey = "azure-sdk-mcp" + $serverConfig = @{ + "type" = "stdio" + "command" = "$PSCommandPath" + } + $orderedServers = [ordered]@{ + $serverKey = $serverConfig + } + if (-not $vscodeConfig.ContainsKey('servers')) { + $vscodeConfig['servers'] = @{} + } + foreach ($key in $vscodeConfig.servers.Keys) { + if ($key -ne $serverKey) { + $orderedServers[$key] = $vscodeConfig.servers[$key] + } + } + $vscodeConfig.servers = $orderedServers + Write-Host "Updating vscode mcp config at $vscodeConfigPath" + $vscodeConfig | ConvertTo-Json -Depth 10 | Set-Content -Path $vscodeConfigPath -Force +} + +$exe = Install-Standalone-Tool ` + -Version $Version ` + -FileName $FileName ` + -Package $Package ` + -Directory $InstallDirectory ` + -Repository $Repository + +if ($Run) { + Start-Process -WorkingDirectory $RunDirectory -FilePath $exe -ArgumentList 'start' -NoNewWindow -Wait +} diff --git a/eng/common/pipelines/codeowners-linter.yml b/eng/common/pipelines/codeowners-linter.yml index 821b0ea8b5a7..cf4d113414d9 100644 --- a/eng/common/pipelines/codeowners-linter.yml +++ b/eng/common/pipelines/codeowners-linter.yml @@ -27,8 +27,8 @@ stages: - job: Run timeoutInMinutes: 120 pool: - name: azsdk-pool-mms-ubuntu-2204-general - vmImage: ubuntu-22.04 + name: azsdk-pool + demands: ImageOverride -equals ubuntu-24.04 variables: CodeownersLinterVersion: '1.0.0-dev.20240926.2' @@ -38,13 +38,10 @@ stages: UserOrgUri: "https://azuresdkartifacts.blob.core.windows.net/azure-sdk-write-teams/user-org-visibility-blob" steps: - - task: DotNetCoreCLI@2 - displayName: 'Install CodeownersLinter' - inputs: - command: custom - 
custom: 'tool' - arguments: 'install --global --add-source "$(DotNetDevOpsFeed)" --version "$(CodeownersLinterVersion)" "Azure.Sdk.Tools.CodeownersLinter"' - workingDirectory: '$(Build.SourcesDirectory)/eng/common' + - pwsh: | + dotnet tool install --global --add-source "$(DotNetDevOpsFeed)" --version "$(CodeownersLinterVersion)" "Azure.Sdk.Tools.CodeownersLinter" + displayName: Install CodeownersLinter + workingDirectory: '$(Agent.WorkFolder)' # Some directory outside of the source clone to avoid hitting global.json files when any version of dotnet will work for this install - ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: - pwsh: | diff --git a/eng/common/pipelines/templates/archetype-typespec-emitter.yml b/eng/common/pipelines/templates/archetype-typespec-emitter.yml index 21fafd58d05e..a92ad9d9935e 100644 --- a/eng/common/pipelines/templates/archetype-typespec-emitter.yml +++ b/eng/common/pipelines/templates/archetype-typespec-emitter.yml @@ -22,7 +22,7 @@ parameters: type: stepList default: [] -# Indicates the build matrix to use for post-build autorest validation +# Indicates the build matrix to use for post-build TypeSpec validation - name: TestMatrix type: object default: {} @@ -37,6 +37,11 @@ parameters: type: boolean default: true +# Whether the built packages contain nuget packages. If true, the publish stage will publish the nuget packages to the internal nuget feed. +- name: HasNugetPackages + type: boolean + default: false + # Indicates if the Publish stage should depend on the Test stage - name: PublishDependsOnTest type: boolean @@ -57,7 +62,7 @@ parameters: type: number default: 10 -# Indicates if regenration matrix should only contain folders with typespec files +# Indicates if regeneration matrix should only contain folders with typespec files - name: OnlyGenerateTypespec type: boolean default: false @@ -67,12 +72,21 @@ parameters: type: object default: [] +# The path to the emitter package json file. 
+- name: EmitterPackageJsonOutputPath + type: string + default: "eng/emitter-package.json" + +# The relative path to the emitter package. +- name: EmitterPackagePath + type: string + extends: template: /eng/pipelines/templates/stages/1es-redirect.yml parameters: stages: # Build stage - # Responsible for building the autorest generator and typespec emitter packages + # Responsible for building the TypeSpec generator and emitter packages # Produces the artifact `build_artifacts` which contains the following: # package-versions.json: Contains a map of package name to version for the packages that were built # overrides.json: Contains npm package version overrides for the emitter and generator @@ -89,24 +103,35 @@ extends: - ${{ parameters.InitializationSteps }} + # Initialize-WorkingDirectory.ps1 is responsible for setting the emitterVersion output variable. - task: PowerShell@2 displayName: 'Run initialize script' + name: initialize inputs: pwsh: true filePath: $(Build.SourcesDirectory)/eng/scripts/typespec/Initialize-WorkingDirectory.ps1 - arguments: -UseTypeSpecNext:$${{ parameters.UseTypeSpecNext }} + ${{ if parameters.BuildPrereleaseVersion }}: + arguments: > + -PrereleaseSuffix "-alpha.$(Build.BuildNumber)" + -OutputDirectory "$(Build.ArtifactStagingDirectory)" + -UseTypeSpecNext:$${{ parameters.UseTypeSpecNext }} + -EmitterPackagePath:${{ parameters.EmitterPackagePath }} + ${{ else }}: + arguments: > + -OutputDirectory "$(Build.ArtifactStagingDirectory)" + -UseTypeSpecNext:$${{ parameters.UseTypeSpecNext }} + -EmitterPackagePath:${{ parameters.EmitterPackagePath }} - task: PowerShell@2 displayName: 'Run build script' - name: ci_build inputs: pwsh: true filePath: $(Build.SourcesDirectory)/eng/scripts/typespec/Build-Emitter.ps1 arguments: > - -BuildNumber "$(Build.BuildNumber)" - -OutputDirectory "$(Build.ArtifactStagingDirectory)" + -OutputDirectory "$(Build.ArtifactStagingDirectory)/packages" -TargetNpmJsFeed:$${{ parameters.PublishPublic }} - -Prerelease:$${{ 
parameters.BuildPrereleaseVersion }} + -EmitterPackagePath:${{ parameters.EmitterPackagePath }} + -GeneratorVersion: $(initialize.emitterVersion) - pwsh: | $sourceBranch = '$(Build.SourceBranch)' @@ -132,89 +157,82 @@ extends: artifactPath: $(Build.ArtifactStagingDirectory) # Publish stage - # Responsible for publishing the packages in `build_artifacts/packages` and producing `emitter-package-lock.json` - # Produces the artifact `publish_artifacts` which contains the following: - # emitter-package.json: Created using the package json from the build step. - # emitter-package-lock.json: Created by calling `npm install` using `emitter-package.json` + # Responsible for publishing the packages in `build_artifacts/packages` - ${{ if parameters.ShouldPublish }}: - stage: Publish dependsOn: - Build - ${{ if and(parameters.PublishDependsOnTest, ne(length(parameters.TestMatrix), 0)) }}: - Test + variables: buildArtifactsPath: $(Pipeline.Workspace)/build_artifacts - pool: ${{ parameters.Pool }} - jobs: - - job: Publish - steps: - - template: /eng/common/pipelines/templates/steps/sparse-checkout.yml - - - download: current - artifact: build_artifacts - displayName: Download build artifacts - - # Create authenticated .npmrc file for publishing to devops - - template: /eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml - parameters: - npmrcPath: $(buildArtifactsPath)/packages/.npmrc - registryUrl: https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest/npm/registry/ - - # publish to devops feed - - pwsh: | - $packageFiles = Get-ChildItem -Path . 
-Filter '*.tgz' - foreach ($file in $packageFiles.Name) { - Write-Host "npm publish $file --verbose --access public" - npm publish $file --verbose --access public - } - displayName: Publish to DevOps feed - workingDirectory: $(buildArtifactsPath)/packages - - - ${{ if parameters.PublishPublic }}: - # publish to npmjs.org using ESRP - - task: EsrpRelease@9 - inputs: - displayName: Publish to npmjs.org - ConnectedServiceName: Azure SDK PME Managed Identity - ClientId: 5f81938c-2544-4f1f-9251-dd9de5b8a81b - DomainTenantId: 975f013f-7f24-47e8-a7d3-abc4752bf346 - UseManagedIdentity: true - KeyVaultName: kv-azuresdk-codesign - SignCertName: azure-sdk-esrp-release-certificate - Intent: PackageDistribution - ContentType: npm - FolderLocation: $(buildArtifactsPath)/packages - Owners: ${{ coalesce(variables['Build.RequestedForEmail'], 'azuresdk@microsoft.com') }} - Approvers: ${{ coalesce(variables['Build.RequestedForEmail'], 'azuresdk@microsoft.com') }} - ServiceEndpointUrl: https://api.esrp.microsoft.com - MainPublisher: ESRPRELPACMANTEST - - - task: PowerShell@2 - displayName: Create emitter-package.json - inputs: - pwsh: true - filePath: ./eng/common/scripts/typespec/New-EmitterPackageJson.ps1 - arguments: > - -PackageJsonPath '$(buildArtifactsPath)/lock-files/package.json' - -OverridesPath '$(buildArtifactsPath)/overrides.json' - -OutputDirectory '$(Build.ArtifactStagingDirectory)' - workingDirectory: $(Build.SourcesDirectory) - - - task: PowerShell@2 - displayName: Create emitter-package-lock.json - inputs: - pwsh: true - filePath: ./eng/common/scripts/typespec/New-EmitterPackageLock.ps1 - arguments: > - -EmitterPackageJsonPath '$(Build.ArtifactStagingDirectory)/emitter-package.json' - -OutputDirectory '$(Build.ArtifactStagingDirectory)' - workingDirectory: $(Build.SourcesDirectory) - - template: /eng/common/pipelines/templates/steps/publish-1es-artifact.yml - parameters: - artifactName: publish_artifacts - artifactPath: $(Build.ArtifactStagingDirectory) + jobs: + - 
deployment: Publish + environment: none + + pool: + name: azsdk-pool + image: windows-2022 # Nuget publish requires .NET framework on windows to handle the auth + os: windows + + templateContext: + type: releaseJob + isProduction: true + inputs: # All input build artifacts must be declared here + - input: pipelineArtifact # Required, type of the input artifact + artifactName: build_artifacts + targetPath: $(buildArtifactsPath) + strategy: + runOnce: + deploy: + steps: + # Create authenticated .npmrc file for publishing to devops + - template: /eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml + parameters: + npmrcPath: $(buildArtifactsPath)/packages/.npmrc + registryUrl: https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest/npm/registry/ + + # publish to devops feed + - pwsh: | + $packageFiles = Get-ChildItem -Path . -Filter '*.tgz' + foreach ($file in $packageFiles.Name) { + Write-Host "npm publish $file --verbose --access public" + npm publish $file --verbose --access public + } + displayName: Publish to DevOps feed + workingDirectory: $(buildArtifactsPath)/packages + + # Publish to https://dev.azure.com/azure-sdk/public/_packaging?_a=feed&feed=azure-sdk-for-net + - ${{ if parameters.HasNugetPackages }}: + - task: 1ES.PublishNuget@1 + displayName: Publish NuGet Packages to DevOps feed + inputs: + packagesToPush: $(buildArtifactsPath)/packages/*.nupkg;!$(buildArtifactsPath)/packages/*.symbols.nupkg + packageParentPath: $(buildArtifactsPath)/packages + publishVstsFeed: "public/azure-sdk-for-net" + + - ${{ if parameters.PublishPublic }}: + # publish to npmjs.org using ESRP + - task: EsrpRelease@9 + inputs: + displayName: Publish to npmjs.org + ConnectedServiceName: Azure SDK PME Managed Identity + ClientId: 5f81938c-2544-4f1f-9251-dd9de5b8a81b + DomainTenantId: 975f013f-7f24-47e8-a7d3-abc4752bf346 + UseManagedIdentity: true + KeyVaultName: kv-azuresdk-codesign + SignCertName: azure-sdk-esrp-release-certificate + 
Intent: PackageDistribution + ContentType: npm + FolderLocation: $(buildArtifactsPath)/packages + Owners: ${{ coalesce(variables['Build.RequestedForEmail'], 'azuresdk@microsoft.com') }} + Approvers: ${{ coalesce(variables['Build.RequestedForEmail'], 'azuresdk@microsoft.com') }} + ServiceEndpointUrl: https://api.esrp.microsoft.com + MainPublisher: ESRPRELPACMANTEST + # Regenerate stage # Responsible for regenerating the SDK code using the emitter package and the generation matrix. - ${{ if and(parameters.ShouldPublish, parameters.ShouldRegenerate) }}: @@ -224,7 +242,7 @@ extends: - Publish variables: pullRequestTargetBranch: 'main' - publishArtifactsPath: $(Pipeline.Workspace)/publish_artifacts + buildArtifactsPath: $(Pipeline.Workspace)/build_artifacts branchName: $[stageDependencies.Build.Build.outputs['set_branch_name.branchName']] pool: ${{ parameters.Pool }} jobs: @@ -235,17 +253,37 @@ extends: Paths: - "/*" - "!SessionRecords" + + - task: UseNode@1 + displayName: 'Install Node.js' + inputs: + version: '22.x' - download: current displayName: Download pipeline artifacts - pwsh: | - Write-Host "Copying emitter-package.json to $(Build.SourcesDirectory)/eng" - Copy-Item $(publishArtifactsPath)/emitter-package.json $(Build.SourcesDirectory)/eng/ -Force + npm install -g @azure-tools/typespec-client-generator-cli@latest + displayName: Install tsp-client - Write-Host "Copying emitter-package-lock.json to $(Build.SourcesDirectory)/eng" - Copy-Item $(publishArtifactsPath)/emitter-package-lock.json $(Build.SourcesDirectory)/eng/ -Force - displayName: Copy emitter-package json files + - pwsh: | + Write-Host "Overrides location: $(buildArtifactsPath)/packages/overrides.json" + + if (Test-Path -Path '$(buildArtifactsPath)/packages/overrides.json') { + Write-Host "Using overrides.json to generate emitter-package.json" + tsp-client generate-config-files ` + --package-json '$(buildArtifactsPath)/lock-files/package.json' ` + --emitter-package-json-path '${{ 
parameters.EmitterPackageJsonOutputPath }}' ` + --overrides '$(buildArtifactsPath)/packages/overrides.json' + } else { + Write-Host "No overrides.json found. Running tsp-client without overrides." + + tsp-client generate-config-files ` + --package-json '$(buildArtifactsPath)/lock-files/package.json' ` + --emitter-package-json-path '${{ parameters.EmitterPackageJsonOutputPath }}' + } + displayName: Generate emitter-package.json and emitter-package-lock files + workingDirectory: $(Build.SourcesDirectory) - ${{ parameters.InitializationSteps }} @@ -254,7 +292,7 @@ extends: BaseRepoOwner: azure-sdk TargetRepoName: $(Build.Repository.Name) BaseRepoBranch: $(branchName) - CommitMsg: Initialize repository for autorest build $(Build.BuildNumber) + CommitMsg: Regenerate repository SDK with TypeSpec build $(Build.BuildNumber) WorkingDirectory: $(Build.SourcesDirectory) ScriptDirectory: $(Build.SourcesDirectory)/eng/common/scripts # To accomodate scheduled runs and retries, we want to overwrite any existing changes on the branch @@ -285,13 +323,23 @@ extends: matrix: $[dependencies.Initialize.outputs['generate_matrix.matrix']] variables: matrixArtifactsPath: $(Pipeline.Workspace)/matrix_artifacts + AzureSdkRepoName: $[format('azure-sdk/{0}', split(variables['Build.Repository.Name'], '/')[1])] steps: - template: /eng/common/pipelines/templates/steps/sparse-checkout.yml parameters: Paths: - "/*" - "!SessionRecords" - + Repositories: + - Name: $(AzureSdkRepoName) + Commitish: $(branchName) + WorkingDirectory: $(System.DefaultWorkingDirectory) + + - task: UseNode@1 + displayName: 'Install Node.js' + inputs: + version: '22.x' + - download: current displayName: Download pipeline artifacts @@ -322,7 +370,7 @@ extends: - Generate variables: generateJobResult: $[dependencies.Generate.result] - emitterVersion: $[stageDependencies.Build.Build.outputs['ci_build.emitterVersion']] + emitterVersion: $[stageDependencies.Build.Build.outputs['initialize.emitterVersion']] steps: - template: 
/eng/common/pipelines/templates/steps/sparse-checkout.yml @@ -356,9 +404,9 @@ extends: $prTitle = "Scheduled code regeneration test" } else { if ($preRelease) { - $prTitle = "Update typespec emitter version to prerelease $emitterVersion" + $prTitle = "Update TypeSpec emitter version to prerelease $emitterVersion" } else { - $prTitle = "Update typespec emitter version to $emitterVersion" + $prTitle = "Update TypeSpec emitter version to $emitterVersion" } if ($generateJobResult -ne 'Succeeded') { @@ -382,6 +430,16 @@ extends: } else { Write-Error "Build.Repository.Name not in the expected {Owner}/{Name} format" } + + $openAsDraft = -not ($reason -eq 'IndividualCI' -and $sourceBranch -eq 'refs/heads/main') + Write-Host "Setting OpenAsDraftBool = $openAsDraft" + Write-Host "##vso[task.setvariable variable=OpenAsDraft]$openAsDraft" + if ($openAsDraft) { + Write-Host "##vso[task.setvariable variable=PRLabels]Do Not Merge" + } else { + Write-Host "##vso[task.setvariable variable=PRLabels]" + } + displayName: Get PR title and body - task: PowerShell@2 @@ -398,8 +456,8 @@ extends: -AuthToken '$(azuresdk-github-pat)' -PRTitle '$(PullRequestTitle)' -PRBody '$(PullRequestBody)' - -OpenAsDraft $true - -PRLabels 'Do Not Merge' + -OpenAsDraft $$(OpenAsDraft) + -PRLabels '$(PRLabels)' workingDirectory: $(Build.SourcesDirectory) # Test stage @@ -432,7 +490,9 @@ extends: inputs: pwsh: true filePath: $(Build.SourcesDirectory)/eng/scripts/typespec/Initialize-WorkingDirectory.ps1 - arguments: -BuildArtifactsPath '$(buildArtifactsPath)' + arguments: > + -BuildArtifactsPath '$(buildArtifactsPath)/lock-files' + -EmitterPackagePath: ${{ parameters.EmitterPackagePath }} - task: PowerShell@2 displayName: 'Run test script' @@ -442,6 +502,7 @@ extends: arguments: > $(TestArguments) -OutputDirectory "$(Build.ArtifactStagingDirectory)" + -EmitterPackagePath: ${{ parameters.EmitterPackagePath }} - template: /eng/common/pipelines/templates/steps/publish-1es-artifact.yml parameters: diff --git 
a/eng/common/pipelines/templates/jobs/archetype-sdk-tests-generate.yml b/eng/common/pipelines/templates/jobs/archetype-sdk-tests-generate.yml index afc5de87ce1f..00aaec7c6a6e 100644 --- a/eng/common/pipelines/templates/jobs/archetype-sdk-tests-generate.yml +++ b/eng/common/pipelines/templates/jobs/archetype-sdk-tests-generate.yml @@ -27,10 +27,10 @@ parameters: default: [] - name: Pool type: string - default: azsdk-pool-mms-ubuntu-2204-general + default: azsdk-pool - name: OsVmImage type: string - default: ubuntu-22.04 + default: ubuntu-24.04 # This parameter is only necessary if there are multiple invocations of this template within the SAME STAGE. # When that occurs, provide a name other than the default value. - name: GenerateJobName diff --git a/eng/common/pipelines/templates/jobs/docindex.yml b/eng/common/pipelines/templates/jobs/docindex.yml index 45c19dc21000..077d07ad16de 100644 --- a/eng/common/pipelines/templates/jobs/docindex.yml +++ b/eng/common/pipelines/templates/jobs/docindex.yml @@ -1,7 +1,8 @@ jobs: - job: CreateDocIndex pool: - name: azsdk-pool-mms-win-2022-general + name: azsdk-pool + demands: ImageOverride -equals windows-2022 steps: - task: UsePythonVersion@0 displayName: 'Use Python 3.11' @@ -36,14 +37,14 @@ jobs: Copy-Item -Path $(Build.SourcesDirectory)/eng/* -Destination ./ -Recurse -Force echo "##vso[task.setvariable variable=toolPath]$(Build.BinariesDirectory)" workingDirectory: $(Build.BinariesDirectory) - displayName: Move eng/common to Tool Directory + displayName: Move eng/common to Tool Directory - task: PublishPipelineArtifact@0 condition: succeeded() inputs: artifactName: "Doc.Index" targetPath: $(Build.ArtifactStagingDirectory)/docfx_project/_site - + - pwsh: | git checkout -b gh-pages-local --track origin/gh-pages-root -f workingDirectory: $(Build.SourcesDirectory) diff --git a/eng/common/pipelines/templates/jobs/npm-publish.yml b/eng/common/pipelines/templates/jobs/npm-publish.yml new file mode 100644 index 
000000000000..9909ace96e9f --- /dev/null +++ b/eng/common/pipelines/templates/jobs/npm-publish.yml @@ -0,0 +1,132 @@ +parameters: + Tag: 'auto' + ArtifactName: 'packages' + ArtifactSubPath: '' + DeploymentName: 'PublishPackage' + DependsOn: [] + Environment: 'package-publish' + Registry: 'https://registry.npmjs.org/' + Pool: # hardcoding the pool and image name because deployment jobs do not support variable expansion in pool names + name: azsdk-pool + image: ubuntu-24.04 + os: linux + CustomCondition: succeeded() + FailOnMissingPackages: true + +jobs: +- deployment: ${{ parameters.DeploymentName }} + displayName: 'Publish ${{ parameters.ArtifactName }} to ${{ parameters.Registry }}' + condition: ${{ parameters.CustomCondition }} + environment: ${{ parameters.Environment }} + dependsOn: ${{ parameters.DependsOn }} + variables: + - name: ArtifactPath + value: $(Pipeline.Workspace)/${{ parameters.ArtifactName }}/${{ parameters.ArtifactSubPath }} + + templateContext: + type: releaseJob + isProduction: ${{ eq(parameters.Registry, 'https://registry.npmjs.org/') }} + inputs: + - input: pipelineArtifact + artifactName: ${{ parameters.ArtifactName }} + itemPattern: '**/*.tgz' + targetPath: $(Pipeline.Workspace)/${{ parameters.ArtifactName }}/ + + pool: ${{ parameters.Pool }} + + strategy: + runOnce: + deploy: + steps: + - pwsh: | + $containsBeta = $false + $containsPackages = $false + foreach ($package in (dir $(ArtifactPath) *.tgz -Recurse)) { + if ($package.Name -match "[\d\.]+-[a-zA-Z]+") { $containsBeta = $true } + Write-Host "Publishing $package to ${{ parameters.Registry }}" + $containsPackages = $true + } + if (!$containsPackages) { + Write-Host "##vso[task.setvariable variable=SkipPublishing]true" + if ("${{ parameters.FailOnMissingPackages }}" -eq 'true') { + Write-Error "No packages found to publish, but FailOnMissingPackages is set to true. Failing the job." 
+ exit 1 + } + else { + Write-Host "No packages found to publish in $(ArtifactPath), so skipping publishing." + exit 0 + } + } + + $tag = '${{ parameters.Tag }}' + if ($tag -eq '' -or $tag -eq 'auto') { + $tag = 'latest' + # If the package is prerelease publish it under 'beta' tag + if ($containsBeta) { $tag = 'beta'} + } + Write-Host "##vso[task.setvariable variable=TagName]$tag" + Write-Host "Publishing packages with tag: $tag" + displayName: 'Packages to be published' + + - ${{ if eq(parameters.Registry, 'https://registry.npmjs.org/') }}: + - task: EsrpRelease@9 + displayName: 'Publish ${{ parameters.ArtifactName }} via ESRP' + condition: and(succeeded(), ne(variables['SkipPublishing'], 'true')) + inputs: + ConnectedServiceName: 'Azure SDK PME Managed Identity' + ClientId: '5f81938c-2544-4f1f-9251-dd9de5b8a81b' + DomainTenantId: '975f013f-7f24-47e8-a7d3-abc4752bf346' + Usemanagedidentity: true + KeyVaultName: 'kv-azuresdk-codesign' + SignCertName: 'azure-sdk-esrp-release-certificate' + Intent: 'PackageDistribution' + ContentType: 'npm' + FolderLocation: $(ArtifactPath) + Owners: ${{ coalesce(variables['Build.RequestedForEmail'], 'azuresdk@microsoft.com') }} + Approvers: ${{ coalesce(variables['Build.RequestedForEmail'], 'azuresdk@microsoft.com') }} + ServiceEndpointUrl: 'https://api.esrp.microsoft.com' + MainPublisher: 'ESRPRELPACMANTEST' + productstate: $(TagName) + + - pwsh: | + foreach ($package in (dir $(ArtifactPath) *.tgz -Recurse)) { + $packageJson = tar -xOf $package "package/package.json" | ConvertFrom-Json + if (!$packageJson) { + Write-Warning "Could not read package.json from $package" + continue; + } + Write-Host "Verifying tag '$(TagName)' is set for '$($packageJson.name)' version '$($packageJson.version)'" + $packageTags = npm view $packageJson.name "dist-tags" -json -silent | ConvertFrom-Json + if ($LASTEXITCODE -ne 0 -or !$packageTags) { + Write-Warning "Failed to retrieve dist-tags for $packageJson.name. 
It is possible the package hasn't been indexed yet so ignoring." + continue + } + + if ($packageTags."$(TagName)" -ne $packageJson.version) { + Write-Error "The dist-tag '$(TagName)' for package '$($packageJson.name)' is not correctly set something must have gone wrong during the ESRP release process." + exit 1 + } + } + displayName: 'Verify tag after ESRP release' + condition: and(succeeded(), ne(variables['SkipPublishing'], 'true')) + + - ${{ else }}: + - template: /eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml + parameters: + npmrcPath: $(ArtifactPath)/.npmrc + registryUrl: ${{ parameters.Registry }} + CustomCondition: and(succeeded(), ne(variables['SkipPublishing'], 'true')) + + - pwsh: | + foreach ($package in (dir $(ArtifactPath) *.tgz -Recurse)) { + Write-Host "npm publish $package --verbose --access public --tag $(TagName) --registry ${{ parameters.Registry }}" + npm publish $package --verbose --access public --tag $(TagName) --registry ${{ parameters.Registry }} + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to publish $package to ${{ parameters.Registry }}" + exit $LASTEXITCODE + } + } + displayName: 'Publish ${{ parameters.ArtifactName }}' + condition: and(succeeded(), ne(variables['SkipPublishing'], 'true')) + workingDirectory: $(ArtifactPath) + diff --git a/eng/common/pipelines/templates/jobs/prepare-pipelines.yml b/eng/common/pipelines/templates/jobs/prepare-pipelines.yml index f2a29a49b2f0..0e54b11a1e87 100644 --- a/eng/common/pipelines/templates/jobs/prepare-pipelines.yml +++ b/eng/common/pipelines/templates/jobs/prepare-pipelines.yml @@ -88,6 +88,7 @@ jobs: switch ($lang) { "java" { + $generatePublicCIPipeline = 'false' $internalVariableGroups = '$(AzureSDK_Maven_Release_Pipeline_Secrets) $(Release_Secrets_for_GitHub) $(APIReview_AutoCreate_Configurations)' $testVariableGroups = '$(Secrets_for_Resource_Provisioner)' } @@ -110,6 +111,7 @@ jobs: $generateUnifiedWeekly = 'true' } "net" { + $generatePublicCIPipeline = 'false' 
$internalVariableGroups = '$(AzureSDK_Nuget_Release_Pipeline_Secrets) $(Release_Secrets_for_GitHub) $(APIReview_AutoCreate_Configurations)' $testVariableGroups = '$(Secrets_for_Resource_Provisioner)' $internalServiceConnections += ' "Azure SDK Symbols Publishing" Nuget.org' @@ -121,6 +123,7 @@ jobs: $internalVariableGroups = '$(Release_Secrets_for_GitHub) $(APIReview_AutoCreate_Configurations) $(Secrets_for_Resource_Provisioner) $(AzureSDK_CocoaPods_Release_Pipeline_Secrets)' } "go" { + $generatePublicCIPipeline = 'false' $internalVariableGroups = '$(Release_Secrets_for_GitHub) $(APIReview_AutoCreate_Configurations) $(Secrets_for_Resource_Provisioner)' $generateUnifiedWeekly = 'true' } diff --git a/eng/common/pipelines/templates/stages/archetype-sdk-tool-pwsh.yml b/eng/common/pipelines/templates/stages/archetype-sdk-tool-pwsh.yml index e068fbd1d7b8..d76b0392df98 100644 --- a/eng/common/pipelines/templates/stages/archetype-sdk-tool-pwsh.yml +++ b/eng/common/pipelines/templates/stages/archetype-sdk-tool-pwsh.yml @@ -10,6 +10,9 @@ parameters: - name: TargetTags type: string default: '' + - name: PreTestSteps + type: object + default: [] variables: - template: /eng/pipelines/templates/variables/globals.yml @@ -22,20 +25,19 @@ stages: strategy: matrix: Windows: - Pool: azsdk-pool-mms-win-2022-general + Pool: azsdk-pool Image: windows-2022 Linux: - Pool: azsdk-pool-mms-ubuntu-2204-general - Image: ubuntu-22.04 - Mac: - Pool: Azure Pipelines - Image: macos-latest + Pool: azsdk-pool + Image: ubuntu-24.04 pool: name: $(Pool) - vmImage: $(Image) + demands: ImageOverride -equals $(Image) steps: + - ${{ parameters.PreTestSteps }} + - template: /eng/common/pipelines/templates/steps/run-pester-tests.yml parameters: TargetDirectory: ${{ parameters.TargetDirectory }} diff --git a/eng/common/pipelines/templates/steps/check-spelling.yml b/eng/common/pipelines/templates/steps/check-spelling.yml index a25fd9444118..8d7a716cbdfc 100644 --- 
a/eng/common/pipelines/templates/steps/check-spelling.yml +++ b/eng/common/pipelines/templates/steps/check-spelling.yml @@ -15,15 +15,9 @@ parameters: steps: - ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: - - task: NodeTool@0 - condition: and(succeededOrFailed(), ne(variables['Skip.SpellCheck'],'true')) - inputs: - versionSpec: 18.x - displayName: Use Node.js 18.x - - task: PowerShell@2 displayName: Check spelling (cspell) - condition: and(succeededOrFailed(), ne(variables['Skip.SpellCheck'],'true')) + condition: and(succeeded(), ne(variables['Skip.SpellCheck'],'true')) continueOnError: ${{ parameters.ContinueOnError }} inputs: targetType: filePath diff --git a/eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml b/eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml index 4b6f08359bd9..52379684c4ea 100644 --- a/eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml +++ b/eng/common/pipelines/templates/steps/create-authenticated-npmrc.yml @@ -3,6 +3,9 @@ parameters: type: string - name: registryUrl type: string + - name: CustomCondition + type: string + default: succeeded() steps: - pwsh: | @@ -17,7 +20,9 @@ steps: $content = "registry=${{ parameters.registryUrl }}`n`nalways-auth=true" $content | Out-File '${{ parameters.npmrcPath }}' displayName: 'Create .npmrc' + condition: ${{ parameters.CustomCondition }} - task: npmAuthenticate@0 displayName: Authenticate .npmrc + condition: ${{ parameters.CustomCondition }} inputs: workingFile: ${{ parameters.npmrcPath }} diff --git a/eng/common/pipelines/templates/steps/detect-api-changes.yml b/eng/common/pipelines/templates/steps/detect-api-changes.yml index 2525dade5205..d997caa84915 100644 --- a/eng/common/pipelines/templates/steps/detect-api-changes.yml +++ b/eng/common/pipelines/templates/steps/detect-api-changes.yml @@ -5,24 +5,25 @@ parameters: Condition: true steps: - - pwsh: | - $apiChangeDetectRequestUrl = "https://apiview.dev/PullRequest/DetectApiChanges" - 
echo "##vso[task.setvariable variable=ApiChangeDetectRequestUrl]$apiChangeDetectRequestUrl" - displayName: "Set API change detect request URL" - condition: and(${{ parameters.Condition}}, eq(variables['ApiChangeDetectRequestUrl'], '')) + - ${{ if eq(variables['Build.Reason'],'PullRequest') }}: + - pwsh: | + $apiChangeDetectRequestUrl = "https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges" + echo "##vso[task.setvariable variable=ApiChangeDetectRequestUrl]$apiChangeDetectRequestUrl" + displayName: "Set API change detect request URL" + condition: and(${{ parameters.Condition}}, eq(variables['ApiChangeDetectRequestUrl'], '')) - - task: Powershell@2 - inputs: - filePath: ${{ parameters.RepoRoot }}/eng/common/scripts/Detect-Api-Changes.ps1 - arguments: > - -ArtifactPath ${{parameters.ArtifactPath}} - -CommitSha '$(System.PullRequest.SourceCommitId)' - -BuildId $(Build.BuildId) - -PullRequestNumber $(System.PullRequest.PullRequestNumber) - -RepoFullName $(Build.Repository.Name) - -APIViewUri $(ApiChangeDetectRequestUrl) - -ArtifactName ${{ parameters.ArtifactName }} - -DevopsProject $(System.TeamProject) - pwsh: true - displayName: Detect API changes - condition: and(${{ parameters.Condition }}, succeededOrFailed(), eq(variables['Build.Reason'],'PullRequest')) + - task: Powershell@2 + inputs: + filePath: ${{ parameters.RepoRoot }}/eng/common/scripts/Detect-Api-Changes.ps1 + arguments: > + -ArtifactPath ${{parameters.ArtifactPath}} + -CommitSha '$(System.PullRequest.SourceCommitId)' + -BuildId $(Build.BuildId) + -PullRequestNumber $(System.PullRequest.PullRequestNumber) + -RepoFullName $(Build.Repository.Name) + -APIViewUri $(ApiChangeDetectRequestUrl) + -ArtifactName ${{ parameters.ArtifactName }} + -DevopsProject $(System.TeamProject) + pwsh: true + displayName: Create APIView if API has changes + condition: and(${{ parameters.Condition }}, succeededOrFailed()) diff --git a/eng/common/pipelines/templates/steps/eng-common-workflow-enforcer.yml 
b/eng/common/pipelines/templates/steps/eng-common-workflow-enforcer.yml index 1524ae8e1b8b..c6b54acb3a6a 100644 --- a/eng/common/pipelines/templates/steps/eng-common-workflow-enforcer.yml +++ b/eng/common/pipelines/templates/steps/eng-common-workflow-enforcer.yml @@ -11,8 +11,8 @@ steps: if ((!"$(System.PullRequest.SourceBranch)".StartsWith("sync-eng/common")) -and "$(System.PullRequest.TargetBranch)" -match "^(refs/heads/)?$(DefaultBranch)$") { - $filesInCommonDir = & "eng/common/scripts/get-changedfiles.ps1" -DiffPath 'eng/common/*' -DiffFilterType "" - if (($LASTEXITCODE -eq 0) -and ($filesInCommonDir.Count -gt 0)) + $filesInCommonDir = & "eng/common/scripts/get-changedfiles.ps1" -DiffPath 'eng/common/*' -DiffFilterType "" + if ($filesInCommonDir.Count -gt 0) { Write-Host "##vso[task.LogIssue type=error;]Changes to files under 'eng/common' directory should not be made in this Repo`n${filesInCommonDir}" Write-Host "##vso[task.LogIssue type=error;]Please follow workflow at https://github.com/Azure/azure-sdk-tools/blob/main/doc/common/common_engsys.md" @@ -21,8 +21,13 @@ steps: } if ((!"$(System.PullRequest.SourceBranch)".StartsWith("sync-.github/workflows")) -and "$(System.PullRequest.TargetBranch)" -match "^(refs/heads/)?$(DefaultBranch)$") { - $filesInCommonDir = & "eng/common/scripts/get-changedfiles.ps1" -DiffPath '.github/workflows/*event*' -DiffFilterType "" - if (($LASTEXITCODE -eq 0) -and ($filesInCommonDir.Count -gt 0)) + # This list needs to be kept in sync with the FilePatterns listed in eng/pipelines/eng-workflows-sync.yml + $filePatterns = @(".github/workflows/*event*", ".github/workflows/post-apiview.yml") + $filesInCommonDir = @() + foreach ($filePattern in $filePatterns) { + $filesInCommonDir += & "eng/common/scripts/get-changedfiles.ps1" -DiffPath $filePattern -DiffFilterType "" + } + if ($filesInCommonDir.Count -gt 0) { Write-Host "##vso[task.LogIssue type=error;]Changes to files under '.github/workflows' directory should not be 
made in this Repo`n${filesInCommonDir}" Write-Host "##vso[task.LogIssue type=error;]Please follow workflow at https://github.com/Azure/azure-sdk-tools/blob/main/doc/workflows/engsys_workflows.md" diff --git a/eng/common/pipelines/templates/steps/publish-1es-artifact.yml b/eng/common/pipelines/templates/steps/publish-1es-artifact.yml index 15663cce30d0..30d38828e720 100644 --- a/eng/common/pipelines/templates/steps/publish-1es-artifact.yml +++ b/eng/common/pipelines/templates/steps/publish-1es-artifact.yml @@ -31,6 +31,8 @@ steps: inputs: artifactName: '$(PublishArtifactName)' targetPath: '${{ parameters.ArtifactPath }}' - # Disable sbom generation by default for forked PR builds to avoid a bunch of warnings - ${{ if not(and(eq(variables['Build.Reason'],'PullRequest'), eq(variables['System.PullRequest.IsFork'], 'True'))) }}: + # Disable sbom generation by default for our public or pull request validation builds to avoid unnecessary work + ${{ if or(eq(variables['System.TeamProject'], 'public'), eq(variables['Build.Reason'], 'PullRequest')) }}: + sbomEnabled: false + ${{ else }}: sbomEnabled: ${{ parameters.SbomEnabled }} diff --git a/eng/common/pipelines/templates/steps/save-package-properties.yml b/eng/common/pipelines/templates/steps/save-package-properties.yml index fff6f3093633..d3a1177aced5 100644 --- a/eng/common/pipelines/templates/steps/save-package-properties.yml +++ b/eng/common/pipelines/templates/steps/save-package-properties.yml @@ -11,6 +11,9 @@ parameters: - name: TargetPath type: string default: $(Build.SourcesDirectory) + - name: WorkingDirectory + type: string + default: $(Build.SourcesDirectory) - name: ScriptDirectory type: string default: eng/common/scripts @@ -36,12 +39,28 @@ steps: -ArtifactPath '${{ parameters.DiffDirectory }}' -ExcludePaths ('${{ convertToJson(parameters.ExcludePaths) }}' | ConvertFrom-Json) pwsh: true + workingDirectory: '${{ parameters.WorkingDirectory }}' + + - task: Powershell@2 + displayName: Save package 
properties filtered for PR + inputs: + filePath: ${{ parameters.ScriptDirectory }}/Save-Package-Properties.ps1 + arguments: > + -PrDiff '${{ parameters.DiffDirectory }}/diff.json' + -OutDirectory '${{ parameters.PackageInfoDirectory }}' + pwsh: true + workingDirectory: '${{ parameters.WorkingDirectory }}' # When running in PR mode, we want the detected changed services to be attached to the build as tags. # However, the public identity does not have the permissions to attach tags to the build. # Instead, we will save the changed services to a file, attach it as an attachment for PiplineWitness to pick up and utilize. + # - pwsh: | - $changedServices = (Get-Content -Path '${{ parameters.DiffDirectory }}/diff.json' -Raw | ConvertFrom-Json).ChangedServices + $changedPackages = Get-ChildItem -Recurse -Filter *.json "${{ parameters.PackageInfoDirectory }}" ` + | ForEach-Object { Get-Content -Raw $_ | ConvertFrom-Json } + + $changedServices = $changedPackages | Where-Object { $_.IncludedForValidation -eq $false } ` + | Select-Object -ExpandProperty ServiceDirectory | Sort-Object -Unique if ($changedServices) { Write-Host "Attaching changed service names to the build for additional tag generation." 
@@ -49,15 +68,8 @@ steps: Write-Host '##vso[task.addattachment type=AdditionalTags;name=AdditionalTags;]$(System.DefaultWorkingDirectory)/tags.json' } displayName: Upload tags.json with changed services + workingDirectory: '${{ parameters.WorkingDirectory }}' - - task: Powershell@2 - displayName: Save package properties filtered for PR - inputs: - filePath: ${{ parameters.ScriptDirectory }}/Save-Package-Properties.ps1 - arguments: > - -PrDiff '${{ parameters.DiffDirectory }}/diff.json' - -OutDirectory '${{ parameters.PackageInfoDirectory }}' - pwsh: true - ${{ else }}: - task: Powershell@2 displayName: Save package properties @@ -68,3 +80,4 @@ steps: -OutDirectory '${{ parameters.PackageInfoDirectory }}' -AddDevVersion:($env:SETDEVVERSION -eq 'true') pwsh: true + workingDirectory: '${{ parameters.WorkingDirectory }}' diff --git a/eng/common/pipelines/templates/steps/sparse-checkout.yml b/eng/common/pipelines/templates/steps/sparse-checkout.yml index d3992a85e3f6..d7f1bc0d1246 100644 --- a/eng/common/pipelines/templates/steps/sparse-checkout.yml +++ b/eng/common/pipelines/templates/steps/sparse-checkout.yml @@ -14,6 +14,9 @@ parameters: - name: TokenToUseForAuth type: string default: '' + - name: PreserveAuthToken + type: boolean + default: false steps: - ${{ if not(parameters.SkipCheckoutNone) }}: @@ -137,7 +140,7 @@ steps: pwsh: true workingDirectory: $(System.DefaultWorkingDirectory) - - ${{ if ne(parameters.TokenToUseForAuth, '') }}: + - ${{ if and(ne(parameters.TokenToUseForAuth, ''), not(parameters.PreserveAuthToken)) }}: - pwsh: | git config unset --global "http.extraheader" displayName: Removing git config auth header diff --git a/eng/common/pipelines/templates/steps/verify-links.yml b/eng/common/pipelines/templates/steps/verify-links.yml index 4d84e124a87f..896b30d0fe38 100644 --- a/eng/common/pipelines/templates/steps/verify-links.yml +++ b/eng/common/pipelines/templates/steps/verify-links.yml @@ -28,6 +28,7 @@ steps: -ignoreLinksFile ${{ 
parameters.IgnoreLinksFile }} -branchReplaceRegex "${{ parameters.BranchReplaceRegex }}" -branchReplacementName ${{ parameters.BranchReplacementName }} - -devOpsLogging: $true -checkLinkGuidance: ${{ parameters.CheckLinkGuidance }} + -localBuildRepoName "$env:BUILD_REPOSITORY_NAME" + -localBuildRepoPath $(Build.SourcesDirectory) -inputCacheFile "https://azuresdkartifacts.blob.core.windows.net/verify-links-cache/verify-links-cache.txt" diff --git a/eng/common/scripts/ChangeLog-Operations.ps1 b/eng/common/scripts/ChangeLog-Operations.ps1 index 3d159ad8be74..f29fc12068dc 100644 --- a/eng/common/scripts/ChangeLog-Operations.ps1 +++ b/eng/common/scripts/ChangeLog-Operations.ps1 @@ -19,7 +19,7 @@ function Get-ChangeLogEntries { LogError "ChangeLog[${ChangeLogLocation}] does not exist" return $null } - LogDebug "Extracting entries from [${ChangeLogLocation}]." + Write-Verbose "Extracting entries from [${ChangeLogLocation}]." return Get-ChangeLogEntriesFromContent (Get-Content -Path $ChangeLogLocation) } diff --git a/eng/common/scripts/Detect-Api-Changes.ps1 b/eng/common/scripts/Detect-Api-Changes.ps1 index 6be93332118c..25f3101a8b9b 100644 --- a/eng/common/scripts/Detect-Api-Changes.ps1 +++ b/eng/common/scripts/Detect-Api-Changes.ps1 @@ -55,11 +55,21 @@ function Submit-Request($filePath, $packageName) } $uri = [System.UriBuilder]$APIViewUri $uri.query = $query.toString() + + $correlationId = [System.Guid]::NewGuid().ToString() + $headers = @{ + "x-correlation-id" = $correlationId + } LogInfo "Request URI: $($uri.Uri.OriginalString)" + LogInfo "Correlation ID: $correlationId" try { - $Response = Invoke-WebRequest -Method 'GET' -Uri $uri.Uri -MaximumRetryCount 3 + $Response = Invoke-WebRequest -Method 'GET' -Uri $uri.Uri -Headers $headers -MaximumRetryCount 3 $StatusCode = $Response.StatusCode + if ($Response.Headers['Content-Type'] -like 'application/json*') { + $responseContent = $Response.Content | ConvertFrom-Json | ConvertTo-Json -Depth 10 + LogSuccess $responseContent 
+ } } catch { diff --git a/eng/common/scripts/Helpers/ApiView-Helpers.ps1 b/eng/common/scripts/Helpers/ApiView-Helpers.ps1 index d84ef5dd28b4..e8d867db9e95 100644 --- a/eng/common/scripts/Helpers/ApiView-Helpers.ps1 +++ b/eng/common/scripts/Helpers/ApiView-Helpers.ps1 @@ -1,3 +1,5 @@ +. ${PSScriptRoot}\..\logging.ps1 + function MapLanguageToRequestParam($language) { $lang = $language @@ -130,10 +132,11 @@ function Set-ApiViewCommentForRelatedIssues { . ${PSScriptRoot}\..\common.ps1 $issuesForCommit = $null try { - $issuesForCommit = Search-GitHubIssues -CommitHash $HeadCommitish + $issuesForCommit = Search-GitHubIssues -CommitHash $HeadCommitish -AuthToken $AuthToken if ($issuesForCommit.items.Count -eq 0) { - LogError "No issues found for commit: $HeadCommitish" - exit 1 + LogInfo "No issues found for commit: $HeadCommitish" + Write-Host "##vso[task.complete result=SucceededWithIssues;]DONE" + exit 0 } } catch { LogError "No issues found for commit: $HeadCommitish" @@ -165,28 +168,42 @@ function Set-ApiViewCommentForPR { $apiviewEndpoint = "$APIViewHost/api/pullrequests?pullRequestNumber=$PrNumber&repoName=$repoFullName&commitSHA=$HeadCommitish" LogDebug "Get APIView information for PR using endpoint: $apiviewEndpoint" + $correlationId = [System.Guid]::NewGuid().ToString() + $headers = @{ + "x-correlation-id" = $correlationId + } + LogInfo "Correlation ID: $correlationId" + $commentText = @() $commentText += "## API Change Check" try { - $response = Invoke-RestMethod -Uri $apiviewEndpoint -Method Get -MaximumRetryCount 3 - if ($response.Count -eq 0) { - LogWarning "API changes are not detected in this pull request." - $commentText += "" - $commentText += "API changes are not detected in this pull request." 
+ $response = Invoke-WebRequest -Uri $apiviewEndpoint -Method Get -Headers $headers -MaximumRetryCount 3 + LogInfo "OperationId: $($response.Headers['X-Operation-Id'])" + if ($response.StatusCode -ne 200) { + LogInfo "API changes are not detected in this pull request." + exit 0 } else { LogSuccess "APIView identified API level changes in this PR and created $($response.Count) API reviews" $commentText += "" $commentText += "APIView identified API level changes in this PR and created the following API reviews" $commentText += "" - $commentText += "| Language | API Review for Package |" - $commentText += "|----------|---------|" - $response | ForEach-Object { - $commentText += "| $($_.language) | [$($_.packageName)]($($_.url)) |" + + $responseContent = $response.Content | ConvertFrom-Json + if ($RepoName.StartsWith(("azure-sdk-for-"))) { + $responseContent | ForEach-Object { + $commentText += "[$($_.packageName)]($($_.url))" + } + } else { + $commentText += "| Language | API Review for Package |" + $commentText += "|----------|---------|" + $responseContent | ForEach-Object { + $commentText += "| $($_.language) | [$($_.packageName)]($($_.url)) |" + } } } } catch{ - LogError "Failed to get API View information for PR: $PrNumber in repo: $repoFullName with commitSHA: $Commitish. Error: $_" + LogError "Failed to get API View information for PR: $PrNumber in repo: $repoFullName with commitSHA: $HeadCommitish. 
Error: $_" exit 1 } @@ -197,12 +214,12 @@ function Set-ApiViewCommentForPR { try { $existingComment = Get-GitHubIssueComments -RepoOwner $RepoOwner -RepoName $RepoName -IssueNumber $PrNumber -AuthToken $AuthToken - $existingAPIViewComment = $existingComment | Where-Object { + $existingAPIViewComment = $existingComment | Where-Object { $_.body.StartsWith("**API Change Check**", [StringComparison]::OrdinalIgnoreCase) -or $_.body.StartsWith("## API Change Check", [StringComparison]::OrdinalIgnoreCase) } } catch { LogWarning "Failed to get comments from Pull Request: $PrNumber in repo: $repoFullName" } - + try { if ($existingAPIViewComment) { LogDebug "Updating existing APIView comment..." @@ -220,3 +237,72 @@ function Set-ApiViewCommentForPR { exit 1 } } + +# Helper function used to create API review requests for Spec generation SDKs pipelines +function Create-API-Review { + param ( + [string]$apiviewEndpoint = "https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges", + [string]$specGenSDKArtifactPath, + [string]$apiviewArtifactName, + [string]$buildId, + [string]$commitish, + [string]$repoName, + [string]$pullRequestNumber + ) + $specGenSDKContent = Get-Content -Path $SpecGenSDKArtifactPath -Raw | ConvertFrom-Json + $language = ($specGenSDKContent.language -split "-")[-1] + + foreach ($requestData in $specGenSDKContent.apiViewRequestData) { + $requestUri = [System.UriBuilder]$apiviewEndpoint + $requestParam = [System.Web.HttpUtility]::ParseQueryString('') + $requestParam.Add('artifactName', $apiviewArtifactName) + $requestParam.Add('buildId', $buildId) + $requestParam.Add('commitSha', $commitish) + $requestParam.Add('repoName', $repoName) + $requestParam.Add('pullRequestNumber', $pullRequestNumber) + $requestParam.Add('packageName', $requestData.packageName) + $requestParam.Add('filePath', $requestData.filePath) + if ($language -ieq "python") { + $requestParam.Add('codeFile', (Split-Path -Path $requestData.filePath -Leaf)) + } + 
$requestParam.Add('language', $language) + $requestUri.query = $requestParam.toString() + $correlationId = [System.Guid]::NewGuid().ToString() + + $headers = @{ + "x-correlation-id" = $correlationId + } + + LogInfo "Request URI: $($requestUri.Uri.OriginalString)" + LogInfo "Correlation ID: $correlationId" + + try + { + $response = Invoke-WebRequest -Method 'GET' -Uri $requestUri.Uri -Headers $headers -MaximumRetryCount 3 + if ($response.StatusCode -eq 201 -or $response.StatusCode -eq 208) { + if ($response.StatusCode -eq 201) { + LogSuccess "Status Code: $($response.StatusCode)`nAPI review request created successfully" + } + elseif ($response.StatusCode -eq 208) { + LogSuccess "Status Code: $($response.StatusCode)`nThere is no API change compared with the previous version." + } + if ($response.Headers['Content-Type'] -like 'application/json*') { + $responseContent = $response.Content | ConvertFrom-Json | ConvertTo-Json -Depth 10 + LogSuccess "Response:`n$($responseContent)" + } + else { + LogSuccess "Response: $($response.Content)" + } + } + else { + LogError "Failed to create API review request. $($response)" + exit 1 + } + } + catch + { + LogError "Error : $($_.Exception)" + exit 1 + } + } +} \ No newline at end of file diff --git a/eng/common/scripts/Helpers/AzSdkTool-Helpers.ps1 b/eng/common/scripts/Helpers/AzSdkTool-Helpers.ps1 new file mode 100644 index 000000000000..4fa981196139 --- /dev/null +++ b/eng/common/scripts/Helpers/AzSdkTool-Helpers.ps1 @@ -0,0 +1,194 @@ +Set-StrictMode -Version 4 + +function Get-SystemArchitecture { + $unameOutput = uname -m + switch ($unameOutput) { + "x86_64" { return "X86_64" } + "aarch64" { return "ARM64" } + "arm64" { return "ARM64" } + default { throw "Unable to determine system architecture. uname -m returned $unameOutput." 
} + } +} + +function Get-Package-Meta( + [Parameter(mandatory = $true)] + $FileName, + [Parameter(mandatory = $true)] + $Package +) { + $ErrorActionPreferenceDefault = $ErrorActionPreference + $ErrorActionPreference = "Stop" + + $AVAILABLE_BINARIES = @{ + "Windows" = @{ + "AMD64" = @{ + "system" = "Windows" + "machine" = "AMD64" + "file_name" = "$FileName-standalone-win-x64.zip" + "executable" = "$Package.exe" + } + } + "Linux" = @{ + "X86_64" = @{ + "system" = "Linux" + "machine" = "X86_64" + "file_name" = "$FileName-standalone-linux-x64.tar.gz" + "executable" = "$Package" + } + "ARM64" = @{ + "system" = "Linux" + "machine" = "ARM64" + "file_name" = "$FileName-standalone-linux-arm64.tar.gz" + "executable" = "$Package" + } + } + "Darwin" = @{ + "X86_64" = @{ + "system" = "Darwin" + "machine" = "X86_64" + "file_name" = "$FileName-standalone-osx-x64.zip" + "executable" = "$Package" + } + "ARM64" = @{ + "system" = "Darwin" + "machine" = "ARM64" + "file_name" = "$FileName-standalone-osx-arm64.zip" + "executable" = "$Package" + } + } + } + + if ($IsWindows) { + $os = "Windows" + # we only support x64 on windows, if that doesn't work the platform is unsupported + $machine = "AMD64" + } + elseif ($IsLinux) { + $os = "Linux" + $machine = Get-SystemArchitecture + } + elseif ($IsMacOS) { + $os = "Darwin" + $machine = Get-SystemArchitecture + } + else { + $os = "unknown" + } + + $ErrorActionPreference = $ErrorActionPreferenceDefault + + return $AVAILABLE_BINARIES[$os][$machine] +} + +function Clear-Directory ($path) { + if (Test-Path -Path $path) { + Remove-Item -Path $path -Recurse -Force + } + New-Item -ItemType Directory -Path $path -Force +} + +function isNewVersion( + [Parameter(mandatory = $true)] + $Version, + [Parameter(mandatory = $true)] + $Directory +) { + $savedVersionTxt = Join-Path $Directory "downloaded_version.txt" + if (Test-Path $savedVersionTxt) { + $result = (Get-Content -Raw $savedVersionTxt).Trim() + + if ($result -eq $Version) { + return $false + } + } 
+ + return $true +} + +<# +.SYNOPSIS +Installs a standalone version of an engsys tool. +.PARAMETER Version +The version of the tool to install. Requires a full version to be provided. EG "1.0.0-dev.20240617.1" +.PARAMETER Directory +The directory within which the exe will exist after this function invokes. Defaults to "." +#> +function Install-Standalone-Tool ( + [Parameter()] + [string]$Version, + [Parameter(mandatory = $true)] + [string]$FileName, + [Parameter(mandatory = $true)] + [string]$Package, + [Parameter()] + [string]$Repository = "Azure/azure-sdk-tools", + [Parameter()] + $Directory = "." +) { + $ErrorActionPreference = "Stop" + $PSNativeCommandUseErrorActionPreference = $true + + $systemDetails = Get-Package-Meta -FileName $FileName -Package $Package + + if (!(Test-Path $Directory) -and $Directory -ne ".") { + New-Item -ItemType Directory -Path $Directory -Force | Out-Null + } + + $tag = "${Package}_${Version}" + + if (!$Version -or $Version -eq "*") { + Write-Host "Attempting to find latest version for package '$Package'" + $releasesUrl = "https://api.github.com/repos/$Repository/releases" + $releases = Invoke-RestMethod -Uri $releasesUrl + $found = $false + foreach ($release in $releases) { + if ($release.tag_name -like "$Package*") { + $tag = $release.tag_name + $Version = $release.tag_name -replace "${Package}_", "" + $found = $true + break + } + } + if ($found -eq $false) { + throw "No release found for package '$Package'" + } + } + + $downloadFolder = Resolve-Path $Directory + $downloadUrl = "https://github.com/$Repository/releases/download/$tag/$($systemDetails.file_name)" + $downloadFile = $downloadUrl.Split('/')[-1] + $downloadLocation = Join-Path $downloadFolder $downloadFile + $savedVersionTxt = Join-Path $downloadFolder "downloaded_version.txt" + $executable_path = Join-Path $downloadFolder $systemDetails.executable + + if (isNewVersion $version $downloadFolder) { + Write-Host "Installing '$Package' '$Version' to 
'$downloadFolder' from $downloadUrl" + Invoke-WebRequest -Uri $downloadUrl -OutFile $downloadLocation + + if ($downloadFile -like "*.zip") { + Expand-Archive -Path $downloadLocation -DestinationPath $downloadFolder -Force + } + elseif ($downloadFile -like "*.tar.gz") { + tar -xzf $downloadLocation -C $downloadFolder + } + else { + throw "Unsupported file format" + } + + # Remove the downloaded file after extraction + Remove-Item -Path $downloadLocation -Force + + # Record downloaded version + Set-Content -Path $savedVersionTxt -Value $Version + + # Set executable permissions if on macOS (Darwin) + if ($IsMacOS) { + chmod 755 $executable_path + } + } + else { + Write-Host "Target version '$Version' already present in target directory '$downloadFolder'" + } + + return $executable_path +} diff --git a/eng/common/scripts/Invoke-GitHubAPI.ps1 b/eng/common/scripts/Invoke-GitHubAPI.ps1 index 556efd64a9b4..0e5bace3e48c 100644 --- a/eng/common/scripts/Invoke-GitHubAPI.ps1 +++ b/eng/common/scripts/Invoke-GitHubAPI.ps1 @@ -560,12 +560,17 @@ function Search-GitHubIssues { [ValidateNotNullOrEmpty()] [Parameter(Mandatory = $true)] $CommitHash, - $State="open" + $State="open", + $AuthToken ) $uri = "https://api.github.com/search/issues?q=sha:$CommitHash+state:$State" - - return Invoke-RestMethod ` - -Method GET ` - -Uri $uri ` - -MaximumRetryCount 3 + $params = @{ + Method = 'GET' + Uri = $uri + MaximumRetryCount = 3 + } + if ($AuthToken) { + $params.Headers = Get-GitHubApiHeaders -token $AuthToken + } + return Invoke-RestMethod @params } diff --git a/eng/common/scripts/Package-Properties.ps1 b/eng/common/scripts/Package-Properties.ps1 index 88d96daa548f..0142017c8468 100644 --- a/eng/common/scripts/Package-Properties.ps1 +++ b/eng/common/scripts/Package-Properties.ps1 @@ -77,11 +77,13 @@ class PackageProps { $this.ArtifactName = $artifactName $this.Initialize($name, $version, $directoryPath, $serviceDirectory) } - hidden 
[PSCustomObject]ParseYmlForArtifact([string]$ymlPath) { + + hidden [PSCustomObject]ParseYmlForArtifact([string]$ymlPath, [bool]$soleCIYml = $false) { $content = LoadFrom-Yaml $ymlPath if ($content) { $artifacts = GetValueSafelyFrom-Yaml $content @("extends", "parameters", "Artifacts") - $artifactForCurrentPackage = $null + $artifactForCurrentPackage = @{} + if ($artifacts) { # If there's an artifactName match that to the name field from the yml if ($this.ArtifactName) { @@ -98,8 +100,9 @@ class PackageProps { } } - # if we found an artifact for the current package, we should count this ci file as the source of the matrix for this package - if ($artifactForCurrentPackage) { + # if we found an artifact for the current package OR this is the sole ci.yml for the given service directory, + # we should count this ci file as the source of the matrix for this package + if ($artifactForCurrentPackage -or $soleCIYml) { $result = [PSCustomObject]@{ ArtifactConfig = [HashTable]$artifactForCurrentPackage ParsedYml = $content @@ -112,15 +115,33 @@ class PackageProps { return $null } - [PSCustomObject]GetCIYmlForArtifact() { + [System.IO.FileInfo[]]ResolveCIFolderPath() { $RepoRoot = Resolve-Path (Join-Path $PSScriptRoot ".." ".." 
"..") - $ciFolderPath = Join-Path -Path $RepoRoot -ChildPath (Join-Path "sdk" $this.ServiceDirectory) - $ciFiles = Get-ChildItem -Path $ciFolderPath -Filter "ci*.yml" -File + $ciFiles = @() + + # if this path exists, then we should look in it for the ci.yml files and return nothing if nothing is found + if (Test-Path $ciFolderPath){ + $ciFiles = @(Get-ChildItem -Path $ciFolderPath -Filter "ci*.yml" -File) + } + # if not, we should at least try to resolve the eng/ folder to fall back and see if that's where the path exists + else { + $ciFolderPath = Join-Path -Path $RepoRoot -ChildPath (Join-Path "eng" $this.ServiceDirectory) + if (Test-Path $ciFolderPath) { + $ciFiles = @(Get-ChildItem -Path $ciFolderPath -Filter "ci*.yml" -File) + } + } + + return $ciFiles + } + + [PSCustomObject]GetCIYmlForArtifact() { + $ciFiles = @($this.ResolveCIFolderPath()) $ciArtifactResult = $null + $soleCIYml = ($ciFiles.Count -eq 1) foreach ($ciFile in $ciFiles) { - $ciArtifactResult = $this.ParseYmlForArtifact($ciFile.FullName) + $ciArtifactResult = $this.ParseYmlForArtifact($ciFile.FullName, $soleCIYml) if ($ciArtifactResult) { break } @@ -137,7 +158,7 @@ class PackageProps { if (-not $this.ArtifactDetails) { $ciArtifactResult = $this.GetCIYmlForArtifact() - if ($ciArtifactResult) { + if ($ciArtifactResult -and $null -ne $ciArtifactResult.ArtifactConfig) { $this.ArtifactDetails = [Hashtable]$ciArtifactResult.ArtifactConfig $repoRoot = Resolve-Path (Join-Path $PSScriptRoot ".." ".." "..") @@ -147,27 +168,32 @@ class PackageProps { if (-not $this.ArtifactDetails["triggeringPaths"]) { $this.ArtifactDetails["triggeringPaths"] = @() } - else { - $adjustedPaths = @() - - # we need to convert relative references to absolute references within the repo - # this will make it extremely easy to compare triggering paths to files in the deleted+changed file list. 
- for ($i = 0; $i -lt $this.ArtifactDetails["triggeringPaths"].Count; $i++) { - $currentPath = $this.ArtifactDetails["triggeringPaths"][$i] - $newPath = Join-Path $repoRoot $currentPath - if (!$currentPath.StartsWith("/")) { - $newPath = Join-Path $repoRoot $relRoot $currentPath - } - # it is a possibility that users may have a triggerPath dependency on a file that no longer exists. - # before we resolve it to get rid of possible relative references, we should check if the file exists - # if it doesn't, we should just leave it as is. Otherwise we would _crash_ here when a user accidentally - # left a triggeringPath on a file that had been deleted - if (Test-Path $newPath) { - $adjustedPaths += (Resolve-Path -Path $newPath -Relative -RelativeBasePath $repoRoot).TrimStart(".").Replace("`\", "/") - } + + # if we know this is the matrix for our file, we should now see if there is a custom matrix config for the package + $serviceTriggeringPaths = GetValueSafelyFrom-Yaml $ciArtifactResult.ParsedYml @("extends", "parameters", "TriggeringPaths") + if ($serviceTriggeringPaths){ + $this.ArtifactDetails["triggeringPaths"] += $serviceTriggeringPaths + } + + $adjustedPaths = @() + + # we need to convert relative references to absolute references within the repo + # this will make it extremely easy to compare triggering paths to files in the deleted+changed file list. + for ($i = 0; $i -lt $this.ArtifactDetails["triggeringPaths"].Count; $i++) { + $currentPath = $this.ArtifactDetails["triggeringPaths"][$i] + $newPath = Join-Path $repoRoot $currentPath + if (!$currentPath.StartsWith("/")) { + $newPath = Join-Path $repoRoot $relRoot $currentPath + } + # it is a possibility that users may have a triggerPath dependency on a file that no longer exists. + # before we resolve it to get rid of possible relative references, we should check if the file exists + # if it doesn't, we should just leave it as is. 
Otherwise we would _crash_ here when a user accidentally + # left a triggeringPath on a file that had been deleted + if (Test-Path $newPath) { + $adjustedPaths += (Resolve-Path -Path $newPath -Relative -RelativeBasePath $repoRoot).TrimStart(".").Replace("`\", "/") } - $this.ArtifactDetails["triggeringPaths"] = $adjustedPaths } + $this.ArtifactDetails["triggeringPaths"] = $adjustedPaths $this.ArtifactDetails["triggeringPaths"] += $ciYamlPath $this.CIParameters["CIMatrixConfigs"] = @() @@ -215,6 +241,22 @@ function Get-PkgProperties { return $null } +function Get-PackagesFromPackageInfo([string]$PackageInfoFolder, [bool]$IncludeIndirect, [ScriptBlock]$CustomCompareFunction = $null) { + $packages = Get-ChildItem -R -Path $PackageInfoFolder -Filter "*.json" | ForEach-Object { + Get-Content $_.FullName | ConvertFrom-Json + } + + if (-not $includeIndirect) { + $packages = $packages | Where-Object { $_.IncludedForValidation -eq $false } + } + + if ($CustomCompareFunction) { + $packages = $packages | Where-Object { &$CustomCompareFunction $_ } + } + + return $packages +} + function Get-TriggerPaths([PSCustomObject]$AllPackageProperties) { $existingTriggeringPaths = @() @@ -246,7 +288,11 @@ function Update-TargetedFilesForTriggerPaths([string[]]$TargetedFiles, [string[] for ($i = 0; $i -lt $Triggers.Count; $i++) { $triggerPath = $Triggers[$i] - if ($triggerPath -and $file -eq "$triggerPath") { + # targeted files comes from the `changedPaths` property of the diff, which is + # a list of relative file paths from root. Not starting with a /. 
+ # However, the triggerPaths are absolute paths, so we need to resolve the targeted file + # to the same format + if ($triggerPath -and "/$file" -eq "$triggerPath") { $isExistingTriggerPath = $true break } @@ -322,9 +368,9 @@ function Get-PrPkgProperties([string]$InputDiffJson) { # this is the primary loop that identifies the packages that have changes foreach ($pkg in $allPackageProperties) { - Write-Host "Processing changed files against $($pkg.Name). $pkgCounter of $($allPackageProperties.Count)." - $pkgDirectory = Resolve-Path "$($pkg.DirectoryPath)" - $lookupKey = ($pkg.DirectoryPath).Replace($RepoRoot, "").TrimStart('\/') + Write-Verbose "Processing changed files against $($pkg.Name). $pkgCounter of $($allPackageProperties.Count)." + $pkgDirectory = (Resolve-Path "$($pkg.DirectoryPath)").Path.Replace("`\", "/") + $lookupKey = $pkgDirectory.Replace($RepoRoot, "").TrimStart('\/') $lookup[$lookupKey] = $pkg # we only honor the individual artifact triggers @@ -338,24 +384,26 @@ function Get-PrPkgProperties([string]$InputDiffJson) { } foreach ($file in $targetedFiles) { - $filePath = (Join-Path $RepoRoot $file) + $filePath = (Join-Path $RepoRoot $file).Replace("`\", "/") # handle direct changes to packages - $shouldInclude = $filePath -eq $pkgDirectory -or $filePath -like (Join-Path "$pkgDirectory" "*") + $shouldInclude = $filePath -eq $pkgDirectory -or $filePath -like "$pkgDirectory/*" + + $includeMsg = "Including '$($pkg.Name)' because of changed file '$filePath'." 
# we only need to do additional work for indirect packages if we haven't already decided # to include this package due to this file if (-not $shouldInclude) { # handle changes to files that are RELATED to each package foreach($triggerPath in $triggeringPaths) { - $resolvedRelativePath = (Join-Path $RepoRoot $triggerPath) + $resolvedRelativePath = (Join-Path $RepoRoot $triggerPath).Replace("`\", "/") # triggerPaths can be direct files, so we need to check both startswith and direct equality - $includedForValidation = ($filePath -like (Join-Path "$resolvedRelativePath" "*") -or $filePath -eq $resolvedRelativePath) + $includedForValidation = ($filePath -like ("$resolvedRelativePath/*") -or $filePath -eq $resolvedRelativePath) $shouldInclude = $shouldInclude -or $includedForValidation if ($includedForValidation) { - $pkg.IncludedForValidation = $true + $includeMsg += " - (triggerPath: '$triggerPath')" + break } - break } # handle service-level changes to the ci.yml files @@ -364,7 +412,6 @@ function Get-PrPkgProperties([string]$InputDiffJson) { # there is a single ci.yml in that directory, we can assume that any file change in that directory # will apply to all packages that exist in that directory. $triggeringCIYmls = $triggeringPaths | Where-Object { $_ -like "*ci*.yml" } - foreach($yml in $triggeringCIYmls) { # given that this path is coming from the populated triggering paths in the artifact, # we can assume that the path to the ci.yml will successfully resolve. @@ -372,12 +419,15 @@ function Get-PrPkgProperties([string]$InputDiffJson) { # ensure we terminate the service directory with a / $directory = [System.IO.Path]::GetDirectoryName($ciYml).Replace("`\", "/") - # we should only continue with this check if the file being changed is "in the service directory" - # files that are directly included in triggerPaths will kept in full form, but otherwise we pre-process the targetedFiles to the - # directory containing the change. 
Given that pre-process, we should check both direct equality (when not triggeringPath) and parent directory - # for the case where the full form of the file has been left behind (because it was a triggeringPath) - $serviceDirectoryChange = (Split-Path $filePath -Parent).Replace("`\", "/") -eq $directory -or $filePath.Replace("`\", "/") -eq $directory - if (!$serviceDirectoryChange) { + # this filepath doesn't apply to this service directory at all, so we can break out of this loop + if (-not $filePath.StartsWith("$directory/")) { + break + } + + $relative = $filePath.SubString($directory.Length + 1) + + if ($relative.Contains("/") -or -not [IO.Path]::GetExtension($relative)){ + # this is a bare folder OR exists deeper than the service directory, so we can skip break } @@ -391,22 +441,18 @@ function Get-PrPkgProperties([string]$InputDiffJson) { $directoryIndex[$directory] = $soleCIYml } - if ($soleCIYml -and $filePath.Replace("`\", "/").StartsWith($directory)) { + if ($soleCIYml -and $filePath.StartsWith($directory)) { if (-not $shouldInclude) { - $pkg.IncludedForValidation = $true $shouldInclude = $true } break } - else { - # if the ci.yml is not the only file in the directory, we cannot assume that any file changed within the directory that isn't the ci.yml - # should trigger this package - Write-Host "Skipping adding package for file `"$file`" because the ci yml `"$yml`" is not the only file in the service directory `"$directory`"" - } } } if ($shouldInclude) { + + LogInfo $includeMsg $packagesWithChanges += $pkg if ($pkg.AdditionalValidationPackages) { @@ -433,6 +479,7 @@ function Get-PrPkgProperties([string]$InputDiffJson) { if ($pkg.Name -notin $existingPackageNames) { $pkg.IncludedForValidation = $true + LogInfo "Including '$($pkg.Name)' for validation only because it is a dependency of another package." 
$packagesWithChanges += $pkg } } @@ -441,13 +488,18 @@ function Get-PrPkgProperties([string]$InputDiffJson) { # now pass along the set of packages we've identified, the diff itself, and the full set of package properties # to locate any additional packages that should be included for validation if ($AdditionalValidationPackagesFromPackageSetFn -and (Test-Path "Function:$AdditionalValidationPackagesFromPackageSetFn")) { - $packagesWithChanges += &$AdditionalValidationPackagesFromPackageSetFn $packagesWithChanges $diff $allPackageProperties + $additionalPackages = &$AdditionalValidationPackagesFromPackageSetFn $packagesWithChanges $diff $allPackageProperties + $packagesWithChanges += $additionalPackages + foreach ($pkg in $additionalPackages) { + LogInfo "Including '$($pkg.Name)' from the additional validation package set." + } } # finally, if we have gotten all the way here and we still don't have any packages, we should include the template service # packages. We should never return NO validation. if ($packagesWithChanges.Count -eq 0) { - $packagesWithChanges += ($allPackageProperties | Where-Object { $_.ServiceDirectory -eq "template" }) + # most of our languages use `template` as the service directory for the template service, but `go` uses `template/aztemplate`. 
+ $packagesWithChanges += ($allPackageProperties | Where-Object { $_.ServiceDirectory -eq "template"-or $_.ServiceDirectory -eq "template/aztemplate" }) foreach ($package in $packagesWithChanges) { $package.IncludedForValidation = $true } diff --git a/eng/common/scripts/Test-SampleMetadata.ps1 b/eng/common/scripts/Test-SampleMetadata.ps1 index 5f1c3dcd08c5..0b475455cdee 100644 --- a/eng/common/scripts/Test-SampleMetadata.ps1 +++ b/eng/common/scripts/Test-SampleMetadata.ps1 @@ -73,7 +73,7 @@ process { Write-Error "File '$($file.FullName)' contains invalid product slug: $product" -TargetObject $file ` -Category InvalidData -CategoryTargetName $product -CategoryTargetType string ` - -RecommendedAction 'Use only product slugs listed at https://review.learn.microsoft.com/help/platform/metadata-taxonomies?branch=main#product' + -RecommendedAction 'Use only product slugs listed at https://taxonomy.learn.microsoft.com/TaxonomyServiceAdminPage/#/taxonomy/' } } @@ -95,14 +95,11 @@ end { } begin { - # https://review.learn.microsoft.com/help/platform/metadata-taxonomies?branch=main#product + # https://taxonomy.learn.microsoft.com/TaxonomyServiceAdminPage/#/taxonomy/ $productSlugs = @( "ai-builder", "aspnet", "aspnet-core", - "azure-active-directory", - "azure-active-directory-b2c", - "azure-active-directory-domain", "azure-advisor", "azure-ai-content-safety", "azure-analysis-services", @@ -365,6 +362,16 @@ begin { "dynamics-talent-onboard", "ef-core", "ef6", + "entra", + "entra-external-id", + "entra-id", + "entra-id-protection", + "entra-identity-governance", + "entra-internet-access", + "entra-permissions-management", + "entra-private-access", + "entra-verified-id", + "entra-workload-identities", "expression-studio", "language-service", "m365-ems", @@ -509,7 +516,7 @@ Checks sample markdown files' frontmatter for invalid information. 
.DESCRIPTION Given a collection of markdown files, their frontmatter - if present - is checked for invalid information, including: -Invalid product slugs, i.e. those not listed in https://review.learn.microsoft.com/help/platform/metadata-taxonomies?branch=main#product. +Invalid product slugs, i.e. those not listed in https://taxonomy.learn.microsoft.com/TaxonomyServiceAdminPage/#/taxonomy/. .PARAMETER Path Specifies the path to an item to search. Wildcards are permitted. diff --git a/eng/common/scripts/Verify-Links.ps1 b/eng/common/scripts/Verify-Links.ps1 index 003e83f0e4e5..d4406c609d0e 100644 --- a/eng/common/scripts/Verify-Links.ps1 +++ b/eng/common/scripts/Verify-Links.ps1 @@ -11,9 +11,6 @@ .PARAMETER ignoreLinksFile Specifies the file that contains a set of links to ignore when verifying. - .PARAMETER devOpsLogging - Switch that will enable devops specific logging for warnings. - .PARAMETER recursive Check the links recurisvely. Applies to links starting with 'baseUrl' parameter. Defaults to true. @@ -45,6 +42,15 @@ .PARAMETER outputCacheFile Path to a file that the script will output all the validated links after running all checks. + .PARAMETER localGithubClonedRoot + Path to the root of a local github clone. This is used to resolve links to local files in the repo instead of making web requests. + + .PARAMETER localBuildRepoName + The name of the repo that is being built. This is used to resolve links to local files in the repo instead of making web requests. + + .PARAMETER localBuildRepoPath + The path to the local build repo. This is used to resolve links to local files in the repo instead of making web requests. + .PARAMETER requestTimeoutSec The number of seconds before we timeout when sending an individual web request. Default is 15 seconds. 
@@ -61,7 +67,6 @@ param ( [string[]] $urls, [string] $ignoreLinksFile = "$PSScriptRoot/ignore-links.txt", - [switch] $devOpsLogging = $false, [switch] $recursive = $true, [string] $baseUrl = "", [string] $rootUrl = "", @@ -72,14 +77,36 @@ param ( [string] $userAgent, [string] $inputCacheFile, [string] $outputCacheFile, - [string] $requestTimeoutSec = 15 + [string] $localGithubClonedRoot = "", + [string] $localBuildRepoName = "", + [string] $localBuildRepoPath = "", + [string] $requestTimeoutSec = 15 ) Set-StrictMode -Version 3.0 +. "$PSScriptRoot/logging.ps1" + $ProgressPreference = "SilentlyContinue"; # Disable invoke-webrequest progress dialog function ProcessLink([System.Uri]$linkUri) { + # To help improve performance and rate limiting issues with github links we try to resolve them based on a local clone if one exists. + if (($localGithubClonedRoot -or $localBuildRepoName) -and $linkUri -match '^https://github.com/(?<org>Azure)/(?<repo>[^/]+)/(?:blob|tree)/(main|.*_[^/]+|.*/v[^/]+)/(?<path>.*)$') { + + if ($localBuildRepoName -eq ($matches['org'] + "/" + $matches['repo'])) { + # If the link is to the current repo, use the local build path + $localPath = Join-Path $localBuildRepoPath $matches['path'] + } + else { + # Otherwise use the local github clone path + $localPath = Join-Path $localGithubClonedRoot $matches['repo'] $matches['path'] + } + + if (Test-Path $localPath) { + return $true + } + return ProcessStandardLink $linkUri + } if ($linkUri -match '^https?://?github\.com/(?<account>[^/]+)/(?<repo>[^/]+)/wiki/.+') { # in an unauthenticated session, urls for missing pages will redirect to the wiki root return ProcessRedirectLink $linkUri -invalidStatusCodes 302 @@ -156,7 +183,7 @@ $emptyLinkMessage = "There is at least one empty link in the page. 
Please replac if (!$userAgent) { $userAgent = "Chrome/87.0.4280.88" } -function NormalizeUrl([string]$url){ +function NormalizeUrl([string]$url) { if (Test-Path $url) { $url = "file://" + (Resolve-Path $url).ToString(); } @@ -182,30 +209,6 @@ function NormalizeUrl([string]$url){ return $uri } -function LogWarning -{ - if ($devOpsLogging) - { - Write-Host "##vso[task.LogIssue type=warning;]$args" - } - else - { - Write-Warning "$args" - } -} - -function LogError -{ - if ($devOpsLogging) - { - Write-Host "##vso[task.logissue type=error]$args" - } - else - { - Write-Error "$args" - } -} - function ResolveUri ([System.Uri]$referralUri, [string]$link) { # If the link is mailto, skip it. @@ -254,14 +257,14 @@ function ParseLinks([string]$baseUri, [string]$htmlContent) $hrefRegex = "<a[^>]+href\s*=\s*[""']?(?<href>[^""']*)[""']?" $regexOptions = [System.Text.RegularExpressions.RegexOptions]"Singleline, IgnoreCase"; - $hrefs = [RegEx]::Matches($htmlContent, $hrefRegex, $regexOptions); + $matches = [RegEx]::Matches($htmlContent, $hrefRegex, $regexOptions); - #$hrefs | Foreach-Object { Write-Host $_ } + Write-Verbose "Found $($matches.Count) raw href's in page $baseUri"; - Write-Verbose "Found $($hrefs.Count) raw href's in page $baseUri"; - [string[]] $links = $hrefs | ForEach-Object { ResolveUri $baseUri $_.Groups["href"].Value } + # Html encoded urls in anchor hrefs need to be decoded + $urls = $matches | ForEach-Object { [System.Web.HttpUtility]::HtmlDecode($_.Groups["href"].Value) } - #$links | Foreach-Object { Write-Host $_ } + [string[]] $links = $urls | ForEach-Object { ResolveUri $baseUri $_ } if ($null -eq $links) { $links = @() @@ -507,6 +510,7 @@ if ($inputCacheFile) $goodLinks = $cacheContent.Split("`n").Where({ $_.Trim() -ne "" -and !$_.StartsWith("#") }) foreach ($goodLink in $goodLinks) { + $goodLink = $goodLink.Trim() $checkedLinks[$goodLink] = $true } } @@ -524,9 +528,8 @@ foreach ($url in $urls) { $pageUrisToCheck.Enqueue($uri); } -if ($devOpsLogging) { - Write-Host 
"##[group]Link checking details" -} +LogGroupStart "Link checking details" + while ($pageUrisToCheck.Count -ne 0) { $pageUri = $pageUrisToCheck.Dequeue(); @@ -562,9 +565,7 @@ while ($pageUrisToCheck.Count -ne 0) } try { - if ($devOpsLogging) { - Write-Host "##[endgroup]" - } + LogGroupEnd if ($badLinks.Count -gt 0) { Write-Host "Summary of broken links:" @@ -587,7 +588,7 @@ try { if ($outputCacheFile) { - $goodLinks = $checkedLinks.Keys.Where({ "True" -eq $checkedLinks[$_].ToString() }) | Sort-Object + $goodLinks = $checkedLinks.Keys.Where({ "True" -eq $checkedLinks[$_].ToString()}) | Sort-Object -Unique Write-Host "Writing the list of validated links to $outputCacheFile" $goodLinks | Set-Content $outputCacheFile diff --git a/eng/common/scripts/common.ps1 b/eng/common/scripts/common.ps1 index a8c38d0a0126..6bef283e449c 100644 --- a/eng/common/scripts/common.ps1 +++ b/eng/common/scripts/common.ps1 @@ -1,6 +1,6 @@ # cSpell:ignore Apireview # cSpell:ignore Onboarded -$RepoRoot = Resolve-Path "${PSScriptRoot}..\..\..\.." +$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot .. .. ..) $EngDir = Join-Path $RepoRoot "eng" $EngCommonDir = Join-Path $EngDir "common" $EngCommonScriptsDir = Join-Path $EngCommonDir "scripts" @@ -24,6 +24,9 @@ $PackageRepository = "Unknown" $packagePattern = "Unknown" $MetadataUri = "Unknown" +# Whether the language repo supports automatically loading .env file generated from TestResources scripts. 
+$SupportsTestResourcesDotenv = $false + # Import common language settings $EngScriptsLanguageSettings = Join-path $EngScriptsDir "Language-Settings.ps1" if (Test-Path $EngScriptsLanguageSettings) { diff --git a/eng/common/scripts/copy-docs-to-blobstorage.ps1 b/eng/common/scripts/copy-docs-to-blobstorage.ps1 index bfcae988b875..852945338e9d 100644 --- a/eng/common/scripts/copy-docs-to-blobstorage.ps1 +++ b/eng/common/scripts/copy-docs-to-blobstorage.ps1 @@ -61,7 +61,7 @@ function ToSemVer($version){ function SortSemVersions($versions) { - return $versions | Sort -Property Major, Minor, Patch, PrereleaseLabel, PrereleaseNumber -Descending + return $versions | Sort-Object -Property Major, Minor, Patch, PrereleaseLabel, PrereleaseNumber -Descending } function Sort-Versions diff --git a/eng/common/scripts/job-matrix/Create-JobMatrix.ps1 b/eng/common/scripts/job-matrix/Create-JobMatrix.ps1 index fe98ca172167..d35b3c923a6d 100644 --- a/eng/common/scripts/job-matrix/Create-JobMatrix.ps1 +++ b/eng/common/scripts/job-matrix/Create-JobMatrix.ps1 @@ -15,30 +15,46 @@ param ( [Parameter(Mandatory=$False)][array] $Filters, [Parameter(Mandatory=$False)][array] $Replace, [Parameter(Mandatory=$False)][array] $NonSparseParameters, + # Use for local generation/debugging when env: values are set in a matrix + [Parameter(Mandatory=$False)][switch] $SkipEnvironmentVariables, [Parameter()][switch] $CI = ($null -ne $env:SYSTEM_TEAMPROJECTID) ) . $PSScriptRoot/job-matrix-functions.ps1 +. $PSScriptRoot/../logging.ps1 if (!(Test-Path $ConfigPath)) { Write-Error "ConfigPath '$ConfigPath' does not exist." 
exit 1 } -$config = GetMatrixConfigFromFile (Get-Content $ConfigPath -Raw) +$rawConfig = Get-Content $ConfigPath -Raw +$config = GetMatrixConfigFromFile $rawConfig # Strip empty string filters in order to be able to use azure pipelines yaml join() $Filters = $Filters | Where-Object { $_ } +LogGroupStart "Matrix generation configuration" +Write-Host "Configuration File: $ConfigPath" +Write-Host $rawConfig +Write-Host "SelectionType: $Selection" +Write-Host "DisplayNameFilter: $DisplayNameFilter" +Write-Host "Filters: $Filters" +Write-Host "Replace: $Replace" +Write-Host "NonSparseParameters: $NonSparseParameters" +LogGroupEnd + [array]$matrix = GenerateMatrix ` -config $config ` -selectFromMatrixType $Selection ` -displayNameFilter $DisplayNameFilter ` -filters $Filters ` -replace $Replace ` - -nonSparseParameters $NonSparseParameters + -nonSparseParameters $NonSparseParameters ` + -skipEnvironmentVariables:$SkipEnvironmentVariables $serialized = SerializePipelineMatrix $matrix -Write-Output $serialized.pretty +Write-Host "Generated matrix:" +Write-Host $serialized.pretty if ($CI) { Write-Output "##vso[task.setVariable variable=matrix;isOutput=true]$($serialized.compressed)" diff --git a/eng/common/scripts/job-matrix/job-matrix-functions.ps1 b/eng/common/scripts/job-matrix/job-matrix-functions.ps1 index acfb6da29549..68a082b727e9 100644 --- a/eng/common/scripts/job-matrix/job-matrix-functions.ps1 +++ b/eng/common/scripts/job-matrix/job-matrix-functions.ps1 @@ -101,8 +101,8 @@ function GenerateMatrix( ) { $result = ProcessImport $config.matrixParameters $selectFromMatrixType $nonSparseParameters $config.displayNamesLookup - $matrixParameters = $result.Matrix - $importedMatrix = $result.ImportedMatrix + $matrixParameters = $result.Matrix + $importedMatrix = $result.ImportedMatrix $combinedDisplayNameLookup = $result.DisplayNamesLookup if ($selectFromMatrixType -eq "sparse") { @@ -148,7 +148,7 @@ function ProcessNonSparseParameters( $nonSparse = [MatrixParameter[]]@() 
foreach ($param in $parameters) { - if ($null -eq $param){ + if ($null -eq $param) { continue } if ($param.Name -in $nonSparseParameters) { @@ -430,9 +430,9 @@ function ProcessImport([MatrixParameter[]]$matrix, [String]$selection, [Array]$n } if ((!$matrix -and !$importPath) -or !$importPath) { return [PSCustomObject]@{ - Matrix = $matrix - ImportedMatrix = @() - DisplayNamesLookup = $displayNamesLookup + Matrix = $matrix + ImportedMatrix = @() + DisplayNamesLookup = $displayNamesLookup } } @@ -456,9 +456,9 @@ function ProcessImport([MatrixParameter[]]$matrix, [String]$selection, [Array]$n } return [PSCustomObject]@{ - Matrix = $matrix ?? @() - ImportedMatrix = $importedMatrix - DisplayNamesLookup = $combinedDisplayNameLookup + Matrix = $matrix ?? @() + ImportedMatrix = $importedMatrix + DisplayNamesLookup = $combinedDisplayNameLookup } } @@ -643,7 +643,7 @@ function InitializeMatrix { function GetMatrixDimensions([MatrixParameter[]]$parameters) { $dimensions = @() foreach ($param in $parameters) { - if ($null -eq $param){ + if ($null -eq $param) { continue } $dimensions += $param.Length() @@ -760,12 +760,12 @@ function Get4dMatrixIndex([int]$index, [Array]$dimensions) { function GenerateMatrixForConfig { param ( - [Parameter(Mandatory = $true)][string] $ConfigPath, - [Parameter(Mandatory = $true)][string] $Selection, - [Parameter(Mandatory = $false)][string] $DisplayNameFilter, - [Parameter(Mandatory = $false)][array] $Filters, - [Parameter(Mandatory = $false)][array] $Replace, - [Parameter(Mandatory = $false)][Array] $NonSparseParameters = @() + [Parameter(Mandatory = $true)][string] $ConfigPath, + [Parameter(Mandatory = $true)][string] $Selection, + [Parameter(Mandatory = $false)][string] $DisplayNameFilter, + [Parameter(Mandatory = $false)][array] $Filters, + [Parameter(Mandatory = $false)][array] $Replace, + [Parameter(Mandatory = $false)][Array] $NonSparseParameters = @() ) $matrixFile = Join-Path $PSScriptRoot ".." ".." ".." ".." 
$ConfigPath @@ -776,12 +776,12 @@ function GenerateMatrixForConfig { $Filters = $Filters | Where-Object { $_ } [array]$matrix = GenerateMatrix ` - -config $config ` - -selectFromMatrixType $Selection ` - -displayNameFilter $DisplayNameFilter ` - -filters $Filters ` - -replace $Replace ` - -nonSparseParameters $NonSparseParameters + -config $config ` + -selectFromMatrixType $Selection ` + -displayNameFilter $DisplayNameFilter ` + -filters $Filters ` + -replace $Replace ` + -nonSparseParameters $NonSparseParameters return , $matrix } diff --git a/eng/common/scripts/logging.ps1 b/eng/common/scripts/logging.ps1 index 1b459d004ad0..94dc900dba9a 100644 --- a/eng/common/scripts/logging.ps1 +++ b/eng/common/scripts/logging.ps1 @@ -35,7 +35,7 @@ function LogWarning { Write-Host ("##vso[task.LogIssue type=warning;]$args" -replace "`n", "%0D%0A") } elseif (Test-SupportsGitHubLogging) { - Write-Warning ("::warning::$args" -replace "`n", "%0D%0A") + Write-Host ("::warning::$args" -replace "`n", "%0D%0A") } else { Write-Warning "$args" @@ -56,7 +56,7 @@ function LogErrorForFile($file, $errorString) Write-Host ("##vso[task.logissue type=error;sourcepath=$file;linenumber=1;columnnumber=1;]$errorString" -replace "`n", "%0D%0A") } elseif (Test-SupportsGitHubLogging) { - Write-Error ("::error file=$file,line=1,col=1::$errorString" -replace "`n", "%0D%0A") + Write-Host ("::error file=$file,line=1,col=1::$errorString" -replace "`n", "%0D%0A") } else { Write-Error "[Error in file $file]$errorString" @@ -68,7 +68,7 @@ function LogError { Write-Host ("##vso[task.LogIssue type=error;]$args" -replace "`n", "%0D%0A") } elseif (Test-SupportsGitHubLogging) { - Write-Error ("::error::$args" -replace "`n", "%0D%0A") + Write-Host ("::error::$args" -replace "`n", "%0D%0A") } else { Write-Error "$args" @@ -80,7 +80,7 @@ function LogDebug { Write-Host "[debug]$args" } elseif (Test-SupportsGitHubLogging) { - Write-Debug "::debug::$args" + Write-Host "::debug::$args" } else { Write-Debug "$args" diff 
--git a/eng/common/scripts/stress-testing/stress-test-deployment-lib.ps1 b/eng/common/scripts/stress-testing/stress-test-deployment-lib.ps1 index 3de02c1d7ac5..a2ec15999ad2 100644 --- a/eng/common/scripts/stress-testing/stress-test-deployment-lib.ps1 +++ b/eng/common/scripts/stress-testing/stress-test-deployment-lib.ps1 @@ -48,7 +48,15 @@ function Login([string]$subscription, [string]$tenant, [string]$clusterGroup, [s Write-Host "Logging in to subscription, cluster and container registry" az account show -s "$subscription" *> $null if ($LASTEXITCODE) { - RunOrExitOnFailure az login --allow-no-subscriptions --tenant $tenant + Run az login --allow-no-subscriptions --tenant $tenant + if ($LASTEXITCODE) { + throw "You do not have access to the TME subscription. Follow these steps to join the group: https://dev.azure.com/azure-sdk/internal/_wiki/wikis/internal.wiki/206/Subscription-and-Tenant-Usage?anchor=azure-sdk-test-resources-tme" + } + } + + $subscriptions = (Run az account list -o json) | ConvertFrom-Json + if ($subscriptions.Length -eq 0) { + throw "You do not have access to the TME subscription. 
Follow these steps to join the group: https://dev.azure.com/azure-sdk/internal/_wiki/wikis/internal.wiki/206/Subscription-and-Tenant-Usage?anchor=azure-sdk-test-resources-tme" } # Discover cluster name, only one cluster per group is expected diff --git a/eng/common/spelling/package-lock.json b/eng/common/spelling/package-lock.json index a87e13d2c52b..8f3d520f4a8c 100644 --- a/eng/common/spelling/package-lock.json +++ b/eng/common/spelling/package-lock.json @@ -509,9 +509,10 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2021,9 +2022,9 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" diff --git a/eng/common/testproxy/install-test-proxy.ps1 b/eng/common/testproxy/install-test-proxy.ps1 index 402e5ddc8cd2..a97b45754d86 100644 --- 
a/eng/common/testproxy/install-test-proxy.ps1 +++ b/eng/common/testproxy/install-test-proxy.ps1 @@ -16,18 +16,15 @@ param( $InstallDirectory ) -. (Join-Path $PSScriptRoot test-proxy.ps1) +. (Join-Path $PSScriptRoot '..' 'scripts' 'Helpers' 'AzSdkTool-Helpers.ps1') Write-Host "Attempting to download and install version `"$Version`" into `"$InstallDirectory`"" -Install-Standalone-TestProxy -Version $Version -Directory $InstallDirectory +$exe = Install-Standalone-Tool ` + -Version $Version ` + -FileName "test-proxy" ` + -Package "Azure.Sdk.Tools.TestProxy" ` + -Directory $InstallDirectory -$PROXY_EXE = "" - -if ($IsWindows) { - $PROXY_EXE = Join-Path $InstallDirectory "Azure.Sdk.Tools.TestProxy.exe" -} else { - $PROXY_EXE = Join-Path $InstallDirectory "Azure.Sdk.Tools.TestProxy" -} -Write-Host "Downloaded test-proxy available at $PROXY_EXE." -Write-Host "##vso[task.setvariable variable=PROXY_EXE]$PROXY_EXE" +Write-Host "Downloaded test-proxy available at $exe." +Write-Host "##vso[task.setvariable variable=PROXY_EXE]$exe" diff --git a/eng/common/testproxy/target_version.txt b/eng/common/testproxy/target_version.txt index ac6bc7f9f590..977b480f4fd3 100644 --- a/eng/common/testproxy/target_version.txt +++ b/eng/common/testproxy/target_version.txt @@ -1 +1 @@ -1.0.0-dev.20250221.1 +1.0.0-dev.20250501.1 diff --git a/eng/common/testproxy/test-proxy-standalone-tool.yml b/eng/common/testproxy/test-proxy-standalone-tool.yml index 596add2b57d3..fb9696e6de1f 100644 --- a/eng/common/testproxy/test-proxy-standalone-tool.yml +++ b/eng/common/testproxy/test-proxy-standalone-tool.yml @@ -32,37 +32,54 @@ steps: } Write-Host "Installing test-proxy version $version" + Write-Host "${{ parameters.templateRoot }}/eng/common/testproxy/install-test-proxy.ps1 -Version $version -InstallDirectory $(Build.BinariesDirectory)/test-proxy" ${{ parameters.templateRoot }}/eng/common/testproxy/install-test-proxy.ps1 -Version $version -InstallDirectory $(Build.BinariesDirectory)/test-proxy displayName: 
"Install test-proxy" condition: and(succeeded(), ${{ parameters.condition }}) - pwsh: | + Write-Host "Prepending path with the test proxy tool install location: '$(Build.BinariesDirectory)/test-proxy'" Write-Host "##vso[task.prependpath]$(Build.BinariesDirectory)/test-proxy" displayName: "Prepend path with test-proxy tool install location" - ${{ if eq(parameters.runProxy, 'true') }}: - pwsh: | + Write-Host "Setting ASPNETCORE_Kestrel__Certificates__Default__Path to '${{ parameters.templateRoot }}/eng/common/testproxy/dotnet-devcert.pfx'" Write-Host "##vso[task.setvariable variable=ASPNETCORE_Kestrel__Certificates__Default__Path]${{ parameters.templateRoot }}/eng/common/testproxy/dotnet-devcert.pfx" + Write-Host "Setting ASPNETCORE_Kestrel__Certificates__Default__Password to 'password'" Write-Host "##vso[task.setvariable variable=ASPNETCORE_Kestrel__Certificates__Default__Password]password" + Write-Host "Setting PROXY_MANUAL_START to 'true'" Write-Host "##vso[task.setvariable variable=PROXY_MANUAL_START]true" displayName: 'Configure Kestrel and PROXY_MANUAL_START Variables' condition: and(succeeded(), ${{ parameters.condition }}) - pwsh: | + $invocation = @" + Start-Process $(PROXY_EXE) + -ArgumentList `"start -u --storage-location ${{ parameters.rootFolder }}`" + -NoNewWindow -PassThru -RedirectStandardOutput ${{ parameters.rootFolder }}/test-proxy.log + -RedirectStandardError ${{ parameters.rootFolder }}/test-proxy-error.log + "@ + Write-Host $invocation + $Process = Start-Process $(PROXY_EXE) ` -ArgumentList "start -u --storage-location ${{ parameters.rootFolder }}" ` -NoNewWindow -PassThru -RedirectStandardOutput ${{ parameters.rootFolder }}/test-proxy.log ` -RedirectStandardError ${{ parameters.rootFolder }}/test-proxy-error.log + Write-Host "Setting PROXY_PID to $($Process.Id)" Write-Host "##vso[task.setvariable variable=PROXY_PID]$($Process.Id)" displayName: 'Run the testproxy - windows' condition: and(succeeded(), eq(variables['Agent.OS'],'Windows_NT'), ${{ 
parameters.condition }}) # nohup does NOT continue beyond the current session if you use it within powershell - bash: | + echo "nohup $(PROXY_EXE) 1>${{ parameters.rootFolder }}/test-proxy.log 2>${{ parameters.rootFolder }}/test-proxy-error.log &" nohup $(PROXY_EXE) 1>${{ parameters.rootFolder }}/test-proxy.log 2>${{ parameters.rootFolder }}/test-proxy-error.log & echo $! > $(Build.SourcesDirectory)/test-proxy.pid + + echo "Setting PROXY_PID to $(cat $(Build.SourcesDirectory)/test-proxy.pid)" echo "##vso[task.setvariable variable=PROXY_PID]$(cat $(Build.SourcesDirectory)/test-proxy.pid)" displayName: "Run the testproxy - linux/mac" condition: and(succeeded(), ne(variables['Agent.OS'],'Windows_NT'), ${{ parameters.condition }}) @@ -71,7 +88,9 @@ steps: - pwsh: | for ($i = 0; $i -lt 10; $i++) { try { + Write-Host "Invoke-WebRequest -Uri `"http://localhost:5000/Admin/IsAlive`" | Out-Null" Invoke-WebRequest -Uri "http://localhost:5000/Admin/IsAlive" | Out-Null + Write-Host "Successfully connected to the test proxy on port 5000." exit 0 } catch { Write-Warning "Failed to successfully connect to test proxy. Retrying..." 
diff --git a/eng/common/testproxy/test-proxy-tool.yml b/eng/common/testproxy/test-proxy-tool.yml index db1bba2f4410..03c9dbaa00c1 100644 --- a/eng/common/testproxy/test-proxy-tool.yml +++ b/eng/common/testproxy/test-proxy-tool.yml @@ -33,6 +33,14 @@ steps: Write-Host "Installing test-proxy version $version" + $invocation = @" + dotnet tool install azure.sdk.tools.testproxy ` + --tool-path $(Build.BinariesDirectory)/test-proxy ` + --add-source https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-net/nuget/v3/index.json ` + --version $version + "@ + Write-Host $invocation + dotnet tool install azure.sdk.tools.testproxy ` --tool-path $(Build.BinariesDirectory)/test-proxy ` --add-source https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-net/nuget/v3/index.json ` @@ -41,23 +49,36 @@ steps: condition: and(succeeded(), ${{ parameters.condition }}) - pwsh: | + Write-Host "Prepending path with the test proxy tool install location: '$(Build.BinariesDirectory)/test-proxy'" Write-Host "##vso[task.prependpath]$(Build.BinariesDirectory)/test-proxy" displayName: "Prepend path with test-proxy tool install location" - ${{ if eq(parameters.runProxy, 'true') }}: - pwsh: | + Write-Host "Setting ASPNETCORE_Kestrel__Certificates__Default__Path to '${{ parameters.templateRoot }}/eng/common/testproxy/dotnet-devcert.pfx'" Write-Host "##vso[task.setvariable variable=ASPNETCORE_Kestrel__Certificates__Default__Path]${{ parameters.templateRoot }}/eng/common/testproxy/dotnet-devcert.pfx" + Write-Host "Setting ASPNETCORE_Kestrel__Certificates__Default__Password to 'password'" Write-Host "##vso[task.setvariable variable=ASPNETCORE_Kestrel__Certificates__Default__Password]password" + Write-Host "Setting PROXY_MANUAL_START to 'true'" Write-Host "##vso[task.setvariable variable=PROXY_MANUAL_START]true" displayName: 'Configure Kestrel and PROXY_MANUAL_START Variables' condition: and(succeeded(), ${{ parameters.condition }}) - pwsh: | + $invocation = @" + 
Start-Process $(Build.BinariesDirectory)/test-proxy/test-proxy.exe + -ArgumentList `"start -u --storage-location ${{ parameters.rootFolder }}`" + -NoNewWindow -PassThru -RedirectStandardOutput ${{ parameters.rootFolder }}/test-proxy.log + -RedirectStandardError ${{ parameters.rootFolder }}/test-proxy-error.log + "@ + Write-Host $invocation + $Process = Start-Process $(Build.BinariesDirectory)/test-proxy/test-proxy.exe ` -ArgumentList "start -u --storage-location ${{ parameters.rootFolder }}" ` -NoNewWindow -PassThru -RedirectStandardOutput ${{ parameters.rootFolder }}/test-proxy.log ` -RedirectStandardError ${{ parameters.rootFolder }}/test-proxy-error.log + Write-Host "Setting PROXY_PID to $($Process.Id)" Write-Host "##vso[task.setvariable variable=PROXY_PID]$($Process.Id)" displayName: 'Run the testproxy - windows' condition: and(succeeded(), eq(variables['Agent.OS'],'Windows_NT'), ${{ parameters.condition }}) @@ -69,6 +90,8 @@ steps: nohup $(Build.BinariesDirectory)/test-proxy/test-proxy 1>${{ parameters.rootFolder }}/test-proxy.log 2>${{ parameters.rootFolder }}/test-proxy-error.log & echo $! > $(Build.SourcesDirectory)/test-proxy.pid + + echo "Setting PROXY_PID to $(cat $(Build.SourcesDirectory)/test-proxy.pid)" echo "##vso[task.setvariable variable=PROXY_PID]$(cat $(Build.SourcesDirectory)/test-proxy.pid)" displayName: "Run the testproxy - linux/mac" condition: and(succeeded(), ne(variables['Agent.OS'],'Windows_NT'), ${{ parameters.condition }}) @@ -79,7 +102,9 @@ steps: - pwsh: | for ($i = 0; $i -lt 10; $i++) { try { + Write-Host "Invoke-WebRequest -Uri `"http://localhost:5000/Admin/IsAlive`" | Out-Null" Invoke-WebRequest -Uri "http://localhost:5000/Admin/IsAlive" | Out-Null + Write-Host "Successfully connected to the test proxy on port 5000." exit 0 } catch { Write-Warning "Failed to successfully connect to test proxy. Retrying..." 
diff --git a/eng/common/testproxy/test-proxy.ps1 b/eng/common/testproxy/test-proxy.ps1 deleted file mode 100644 index f1bf1eca8fd4..000000000000 --- a/eng/common/testproxy/test-proxy.ps1 +++ /dev/null @@ -1,162 +0,0 @@ -Set-StrictMode -Version 4 -$AVAILABLE_TEST_PROXY_BINARIES = @{ - "Windows" = @{ - "AMD64" = @{ - "system" = "Windows" - "machine" = "AMD64" - "file_name" = "test-proxy-standalone-win-x64.zip" - "executable" = "Azure.Sdk.Tools.TestProxy.exe" - } - } - "Linux" = @{ - "X86_64" = @{ - "system" = "Linux" - "machine" = "X86_64" - "file_name" = "test-proxy-standalone-linux-x64.tar.gz" - "executable" = "Azure.Sdk.Tools.TestProxy" - } - "ARM64" = @{ - "system" = "Linux" - "machine" = "ARM64" - "file_name" = "test-proxy-standalone-linux-arm64.tar.gz" - "executable" = "Azure.Sdk.Tools.TestProxy" - } - } - "Darwin" = @{ - "X86_64" = @{ - "system" = "Darwin" - "machine" = "X86_64" - "file_name" = "test-proxy-standalone-osx-x64.zip" - "executable" = "Azure.Sdk.Tools.TestProxy" - } - "ARM64" = @{ - "system" = "Darwin" - "machine" = "ARM64" - "file_name" = "test-proxy-standalone-osx-arm64.zip" - "executable" = "Azure.Sdk.Tools.TestProxy" - } - } -} - -function Get-SystemArchitecture { - $unameOutput = uname -m - switch ($unameOutput) { - "x86_64" { return "X86_64" } - "aarch64" { return "ARM64" } - "arm64" { return "ARM64" } - default { throw "Unable to determine system architecture. uname -m returned $unameOutput." 
} - } -} - -function Get-Proxy-Meta () { - $ErrorActionPreferenceDefault = $ErrorActionPreference - $ErrorActionPreference = "Stop" - - $os = "unknown" - $machine = Get-SystemArchitecture - - if ($IsWindows) { - $os = "Windows" - # we only support x64 on windows, if that doesn't work the platform is unsupported - $machine = "AMD64" - } elseif ($IsLinux) { - $os = "Linux" - } elseif ($IsMacOS) { - $os = "Darwin" - } - - $ErrorActionPreference = $ErrorActionPreferenceDefault - - return $AVAILABLE_TEST_PROXY_BINARIES[$os][$machine] -} - -function Get-Proxy-Url ( - [Parameter(mandatory=$true)]$Version -) { - $systemDetails = Get-Proxy-Meta - - $file = $systemDetails.file_name - $url = "https://github.com/Azure/azure-sdk-tools/releases/download/Azure.Sdk.Tools.TestProxy_$Version/$file" - - return $url -} - -function Cleanup-Directory ($path) { - if (Test-Path -Path $path) { - Remove-Item -Path $path -Recurse -Force - } - New-Item -ItemType Directory -Path $path -Force -} - -function Is-Work-Necessary ( - [Parameter(mandatory=$true)] - $Version, - [Parameter(mandatory=$true)] - $Directory -) { - $savedVersionTxt = Join-Path $Directory "downloaded_version.txt" - if (Test-Path $savedVersionTxt) { - $result = (Get-Content -Raw $savedVersionTxt).Trim() - - if ($result -eq $Version) { - return $false - } - } - - return $true -} - -<# -.SYNOPSIS -Installs a standalone version of the test-proxy. -.PARAMETER Version -The version of the proxy to install. Requires a full version to be provided. EG "1.0.0-dev.20240617.1" -.PARAMETER Directory -The directory within which the test-proxy exe will exist after this function invokes. Defaults to "." -#> -function Install-Standalone-TestProxy ( - [Parameter(mandatory=$true)] - $Version, - $Directory="." 
-) { - $ErrorActionPreference = "Stop" - $systemDetails = Get-Proxy-Meta - - if (!(Test-Path $Directory) -and $Directory -ne ".") { - New-Item -ItemType Directory -Path $Directory -Force - } - - $downloadFolder = Resolve-Path $Directory - $downloadUrl = Get-Proxy-Url $Version - $downloadFile = $downloadUrl.Split('/')[-1] - $downloadLocation = Join-Path $downloadFolder $downloadFile - $savedVersionTxt = Join-Path $downloadFolder "downloaded_version.txt" - - if (Is-Work-Necessary $version $downloadFolder) { - Write-Host "Commencing installation of `"$Version`" to `"$downloadFolder`" from $downloadUrl." - Invoke-WebRequest -Uri $downloadUrl -OutFile $downloadLocation - - if ($downloadFile -like "*.zip") { - Expand-Archive -Path $downloadLocation -DestinationPath $downloadFolder -Force - } elseif ($downloadFile -like "*.tar.gz") { - tar -xzf $downloadLocation -C $downloadFolder - } else { - throw "Unsupported file format" - } - - # Remove the downloaded file after extraction - Remove-Item -Path $downloadLocation -Force - - # Record downloaded version - Set-Content -Path $savedVersionTxt -Value $Version - - # Set executable permissions if on macOS (Darwin) - $executable_path = Join-Path $downloadFolder $systemDetails.executable - if ($IsMacOS) { - chmod 755 $executable_path - } - } - else { - Write-Host "Target version `"$Version`" already present in target directory `"$downloadFolder.`"" - } -} diff --git a/eng/pipelines/codeowners-linter.yml b/eng/pipelines/codeowners-linter.yml index 1b46ff948253..a9979632406d 100644 --- a/eng/pipelines/codeowners-linter.yml +++ b/eng/pipelines/codeowners-linter.yml @@ -28,7 +28,7 @@ stages: timeoutInMinutes: 120 pool: name: azsdk-pool - demands: ImageOverride -equals ubuntu-22.04 + demands: ImageOverride -equals ubuntu-24.04 variables: CodeownersLinterVersion: '1.0.0-dev.20240926.2' diff --git a/eng/pipelines/guardian.yml b/eng/pipelines/guardian.yml new file mode 100644 index 000000000000..824f376e54e4 --- /dev/null +++ 
b/eng/pipelines/guardian.yml @@ -0,0 +1,29 @@ +trigger: none +pr: none + +resources: + repositories: + - repository: 1ESPipelineTemplates + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + +variables: + - template: /eng/pipelines/templates/variables/image.yml + +extends: + template: v1/1ES.Unofficial.PipelineTemplate.yml@1ESPipelineTemplates + parameters: + featureFlags: + incrementalSDLSourceAnalysis: true + sdl: + enableAllTools: false + sourceAnalysisPool: + name: $(WINDOWSPOOL) + image: $(WINDOWSVMIMAGE) + os: windows + credscan: + enabled: true + # This uses the Credscan Supression file from .config/CredScanSuppressions.json which is the default location for Credscan Suppression Files + policheck: + enabled: true diff --git a/eng/pipelines/spec-gen-sdk-batch.yml b/eng/pipelines/spec-gen-sdk-batch.yml index 7cacb9129391..af06bfacac04 100644 --- a/eng/pipelines/spec-gen-sdk-batch.yml +++ b/eng/pipelines/spec-gen-sdk-batch.yml @@ -6,6 +6,10 @@ parameters: - 'all-typespecs' - 'all-openapis' - 'sample-typespecs' + - 'all-mgmtplane-typespecs' + - 'all-dataplane-typespecs' + - 'all-mgmtplane-openapis' + - 'all-dataplane-openapis' default: 'sample-typespecs' displayName: 'Batch Specs to Run' diff --git a/eng/pipelines/spec-gen-sdk.yml b/eng/pipelines/spec-gen-sdk.yml index 8aa835d11d91..0b5b5e849d6e 100644 --- a/eng/pipelines/spec-gen-sdk.yml +++ b/eng/pipelines/spec-gen-sdk.yml @@ -31,10 +31,6 @@ parameters: displayName: 'Skip SDK pull request creation' trigger: none -pr: - paths: - include: - - specification/** extends: template: /eng/pipelines/templates/stages/archetype-spec-gen-sdk.yml diff --git a/eng/pipelines/swagger-apiview.yml b/eng/pipelines/swagger-apiview.yml index bb81469a9555..d1aa2ef0e9c9 100644 --- a/eng/pipelines/swagger-apiview.yml +++ b/eng/pipelines/swagger-apiview.yml @@ -10,7 +10,7 @@ parameters: default: 'swaggerAPIViewArtifacts' - name: APIViewAPIUri type: string - default: 
'https://apiview.dev/PullRequest/DetectAPIChanges' + default: 'https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges' pr: branches: @@ -21,7 +21,7 @@ jobs: - job: pool: name: azsdk-pool - demands: ImageOverride -equals ubuntu-22.04 + demands: ImageOverride -equals ubuntu-24.04 steps: - checkout: self @@ -62,6 +62,6 @@ jobs: -RepoName $(Build.Repository.Name) ` -PullRequestNumber $(System.PullRequest.PullRequestNumber)` -Language 'Swagger' ` - -CommitSha $(Build.SourceVersion) + -CommitSha $(System.PullRequest.SourceCommitId) displayName: Create Swagger APIView condition: and(succeeded(), ne(variables['Agent.JobStatus'], 'SucceededWithIssues')) \ No newline at end of file diff --git a/eng/pipelines/swagger-prettier-check.yml b/eng/pipelines/swagger-prettier-check.yml index 8766cddcfb38..140626ec0136 100644 --- a/eng/pipelines/swagger-prettier-check.yml +++ b/eng/pipelines/swagger-prettier-check.yml @@ -4,7 +4,7 @@ jobs: - job: pool: name: azsdk-pool - demands: ImageOverride -equals ubuntu-22.04 + demands: ImageOverride -equals ubuntu-24.04 variables: - template: /eng/pipelines/templates/variables/globals.yml diff --git a/eng/pipelines/templates/stages/1es-redirect.yml b/eng/pipelines/templates/stages/1es-redirect.yml deleted file mode 100644 index 96dcde9a810f..000000000000 --- a/eng/pipelines/templates/stages/1es-redirect.yml +++ /dev/null @@ -1,49 +0,0 @@ -resources: - repositories: - - repository: 1ESPipelineTemplates - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - -parameters: -- name: stages - type: stageList - default: [] -- name: Use1ESOfficial - type: boolean - default: true -- name: GenerateBaselines - type: boolean - default: false - -extends: - ${{ if and(parameters.Use1ESOfficial, eq(variables['System.TeamProject'], 'internal')) }}: - template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates - parameters: - settings: - skipBuildTagsForGitHubPullRequests: true - sdl: - ${{ if 
and(parameters.GenerateBaselines, eq(variables['Build.SourceBranchName'], 'main'), eq(variables['System.TeamProject'], 'internal')) }}: - autobaseline: - isMainPipeline: true - disableAutoBaselineOnNonDefaultBranches: true - enableForGitHub: true - sourceAnalysisPool: - name: azsdk-pool - image: windows-2022 - os: windows - psscriptanalyzer: - compiled: true - break: true - policy: M365 - stages: ${{ parameters.stages }} - ${{ else }}: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1ESPipelineTemplates - parameters: - sdl: # SDLSources stage still runs even all tools are disabled. 1es team uses it for other purposes and they have the following work item to remove the stage. - enableAllTools: false # https://dev.azure.com/mseng/1ES/_workitems/edit/2253084 - sourceAnalysisPool: - name: azsdk-pool - image: windows-2022 - os: windows - stages: ${{ parameters.stages }} \ No newline at end of file diff --git a/eng/pipelines/templates/stages/archetype-spec-gen-sdk.yml b/eng/pipelines/templates/stages/archetype-spec-gen-sdk.yml index 88cd2f2ea9c3..764f9e421e0b 100644 --- a/eng/pipelines/templates/stages/archetype-spec-gen-sdk.yml +++ b/eng/pipelines/templates/stages/archetype-spec-gen-sdk.yml @@ -29,263 +29,222 @@ parameters: type: string default: '' -extends: - template: /eng/pipelines/templates/stages/1es-redirect.yml - parameters: - Use1ESOfficial: false - stages: - - stage: Build - displayName: 'SDK Generation Preview' - jobs: - - job: - timeoutInMinutes: 2400 - - variables: - - template: /eng/pipelines/templates/variables/image.yml - - name: NodeVersion - value: '22.13.x' - - name: PythonVersion - value: '3.13' - - name: PipelineArtifactsName - value: 'packages' - - name: SpecRepoCommit - value: $(Build.SourceVersion) - - pool: - name: $(LINUXPOOL) - vmImage: $(LINUXVMIMAGE) - os: linux - - templateContext: - outputParentDirectory: $(System.DefaultWorkingDirectory)/out - outputs: - - output: pipelineArtifact - displayName: Publish logs to Pipeline Artifacts - 
artifactName: "spec-gen-sdk-logs" - targetPath: "$(System.DefaultWorkingDirectory)/out/logs" - - steps: - - checkout: none - - - pwsh: | - $tspConfigPathPattern = '^specification\/([^\/]+\/)+tspconfig\.yaml$' - $readmePathPattern = '^specification\/([^\/]+\/){2,}readme\.md$' - if (('${{ parameters.ConfigType }}' -eq 'TypeSpec') -and ('${{ parameters.ConfigPath }}' -notmatch $tspConfigPathPattern)) { - Write-Host "##vso[task.logissue type=error]'ConfigPath' must be a valid 'tspconfig.yaml' file path when 'ConfigType' is set to 'TypeSpec'. For example, 'specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml'" - Exit 1 - } elseif (('${{ parameters.ConfigType }}' -eq 'OpenAPI') -and ('${{ parameters.ConfigPath }}' -notmatch $readmePathPattern)) { - Write-Host "##vso[task.logissue type=error]'ConfigPath' must be a valid 'readme.md' file path when 'ConfigType' is set to 'OpenAPI'. For example, 'specification/appplatform/resource-manager/readme.md'" - Exit 1 - } - if (('$(Build.Reason)' -ne 'PullRequest') -and ('${{ parameters.SpecBatchTypes }}' -eq '')) { - if ('${{ parameters.ApiVersion }}' -eq '') { - Write-Host "##vso[task.logissue type=error]Api version parameter is empty. Ensure it is set when trigger the pipeline." - Exit 1 - } - if (('${{ parameters.SdkReleaseType }}' -eq 'stable') -and ('${{ parameters.ApiVersion}}' -contains 'preview')) { - Write-Host "##vso[task.logissue type=error]'SdkReleaseType' must be set to 'beta' for the preview API specifications." 
- Exit 1 - } - } - $urlPattern = '^https://github\.com/(?[^/]+)/(?[^/]+)' - if ('${{ parameters.SpecRepoUrl }}' -match $urlPattern) { - $specRepoOwner = $Matches['organization'] - Write-Host "##vso[task.setvariable variable=SpecRepoOwner]$specRepoOwner" - Write-Host "SpecRepoOwner variable set to: $specRepoOwner" - - $specRepoName = $Matches['repository'] - Write-Host "##vso[task.setvariable variable=SpecRepoName]$specRepoName" - Write-Host "SpecRepoName variable set to: $specRepoName" - - $specRepoDirectory = "$(System.DefaultWorkingDirectory)/$specRepoName" - Write-Host "##vso[task.setvariable variable=SpecRepoDirectory]$specRepoDirectory" - Write-Host "SpecRepoDirectory variable set to: $specRepoDirectory" - } - - if ('${{ parameters.SdkRepoUrl }}' -match $urlPattern) { - if ('${{ parameters.SpecRepoUrl }}'.EndsWith('-pr') -and (-not '${{ parameters.SdkRepoUrl }}'.EndsWith('-pr'))) { - Write-Host "##vso[task.logissue type=error]SdkRepoUrl must be a private repository if SpecRepoUrl is a private repository." - Exit 1 - } - - $sdkRepoOwner = $Matches['organization'] - Write-Host "##vso[task.setvariable variable=SdkRepoOwner]$sdkRepoOwner" - Write-Host "SdkRepoOwner variable set to: $sdkRepoOwner" - - $sdkRepoName = $Matches['repository'] - Write-Host "##vso[task.setvariable variable=SdkRepoName]$sdkRepoName" - Write-Host "SdkRepoName variable set to: $sdkRepoName" - - $sdkRepoDirectory = "$(System.DefaultWorkingDirectory)/$sdkRepoName" - Write-Host "##vso[task.setvariable variable=SdkRepoDirectory]$sdkRepoDirectory" - Write-Host "SdkRepoDirectory variable set to: $sdkRepoDirectory" - } - - if ([string]::IsNullOrEmpty($SpecRepoOwner) -or [string]::IsNullOrEmpty($SpecRepoName) -or [string]::IsNullOrEmpty($SdkRepoOwner) -or [string]::IsNullOrEmpty($SdkRepoName)) { - Write-Host "##vso[task.logissue type=error]One or more required variables is empty or invalid. Ensure that SpecRepoUrl and SdkRepoUrl are set to valid GitHub repository URLs." 
- Exit 1 - } - - displayName: "Create Run Time Variables" - - - template: /eng/common/pipelines/templates/steps/sparse-checkout.yml - parameters: - Paths: - - '/*' - - '!sdk/**/test-recordings/*' - - '!sdk/**/recordings/*' - - '!sdk/**/SessionRecords/*' - - '!sdk/**/session-records/*' - Repositories: - - Name: $(SpecRepoOwner)/$(SpecRepoName) - Commitish: $(SpecRepoCommit) - WorkingDirectory: $(SpecRepoDirectory) - - Name: $(SdkRepoOwner)/$(SdkRepoName) - Commitish: ${{ parameters.SdkRepoCommit }} - WorkingDirectory: $(SdkRepoDirectory) - SkipCheckoutNone: true - - - task: NodeTool@0 - inputs: - versionSpec: $(NodeVersion) - displayName: 'Install Node.js' - - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PythonVersion) - - - script: | - optional_params="" - sdk_gen_info="Configurations: " - - if [ "$(Build.Reason)" != "PullRequest" ] && [ "${{ parameters.ConfigType }}" = "TypeSpec" ]; then - optional_params="$optional_params --tsp-config-relative-path ${{ parameters.ConfigPath }}" - sdk_gen_info="$sdk_gen_info '${{ parameters.ConfigPath }}'," - elif [ "$(Build.Reason)" != "PullRequest" ] && [ "${{ parameters.ConfigType }}" = "OpenAPI" ]; then - optional_params="$optional_params --readme-relative-path ${{ parameters.ConfigPath }}" - sdk_gen_info="$sdk_gen_info '${{ parameters.ConfigPath }}'," - fi - - if [ "$(Build.Reason)" = "PullRequest" ]; then - optional_params="$optional_params --pr-number $(System.PullRequest.PullRequestNumber)" - specPrUrl="${{ parameters.SpecRepoUrl }}/pull/$(System.PullRequest.PullRequestNumber)" - sdk_gen_info="$sdk_gen_info spec PR: $specPrUrl" - fi - - if [ "${{ parameters.SpecBatchTypes }}" != "" ]; then - optional_params="$optional_params --rm ${{ parameters.SpecBatchTypes }}" - sdk_gen_info="$sdk_gen_info SpecBatchTypes: ${{ parameters.SpecBatchTypes }}," - fi - - if [ "${{ parameters.ApiVersion }}" != "" ]; then - optional_params="$optional_params --api-version ${{ parameters.ApiVersion }} --sdk-release-type ${{ 
parameters.SdkReleaseType }}" - sdk_gen_info="$sdk_gen_info API Version: ${{ parameters.ApiVersion }}, SDK Release Type: ${{ parameters.SdkReleaseType }}," - fi - sdk_gen_info="$sdk_gen_info and CommitSHA: '$(SpecRepoCommit)' in SpecRepo: '${{ parameters.SpecRepoUrl }}'" - echo "##vso[task.setvariable variable=GeneratedSDKInformation]$sdk_gen_info" - echo "$sdk_gen_info" - - cd $(SpecRepoDirectory) - echo "##[group]Run npm ci" - npm ci - echo "##[endgroup]" - node $(SpecRepoDirectory)/eng/tools/spec-gen-sdk-runner/cmd/spec-gen-sdk-runner.js \ - --scp $(SpecRepoDirectory) \ - --sdp $(SdkRepoDirectory) \ - --wf $(System.DefaultWorkingDirectory) \ - --lang $(SdkRepoName) \ - --commit $(SpecRepoCommit) \ - --spec-repo-url ${{ parameters.SpecRepoUrl }} \ - --tr true \ - $optional_params - displayName: 'Generate SDK' - - - task: Powershell@2 - inputs: - filePath: $(SdkRepoDirectory)/eng/common/scripts/Save-Package-Properties.ps1 - arguments: > - -ServiceDirectory $(GeneratedSDK.ServiceName) - -OutDirectory $(GeneratedSDK.StagedArtifactsFolder)/PackageInfo - pwsh: true - workingDirectory: $(SdkRepoDirectory) - displayName: Dump Package properties - condition: and(succeeded(), ne(variables['GeneratedSDK.ServiceName'], ''), ne(variables['GeneratedSDK.StagedArtifactsFolder'], '')) - - - template: /eng/common/pipelines/templates/steps/publish-1es-artifact.yml - parameters: - ArtifactName: $(BreakingChangeLabelArtifactName) - ArtifactPath: "$(System.DefaultWorkingDirectory)/$(BreakingChangeLabelArtifactPath)" - CustomCondition: and(succeeded(), ne(variables['BreakingChangeLabelArtifactName'], '')) - - - template: /eng/common/pipelines/templates/steps/publish-1es-artifact.yml - parameters: - ArtifactName: $(PipelineArtifactsName) - ArtifactPath: $(GeneratedSDK.StagedArtifactsFolder) - CustomCondition: and(succeeded(), ne(variables['GeneratedSDK.StagedArtifactsFolder'], '')) - - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - - task: PowerShell@2 - displayName: 
Add label to the spec PR - condition: and(eq(variables['Build.Reason'], 'PullRequest'), ne(variables['BreakingChangeLabel'], ''), eq(variables['BreakingChangeLabelAction'], 'add')) - inputs: - pwsh: true - workingDirectory: $(SdkRepoDirectory) - filePath: $(SdkRepoDirectory)/eng/common/scripts/Add-IssueLabels.ps1 - arguments: > - -RepoOwner $(SpecRepoOwner) - -RepoName $(SpecRepoName) - -IssueNumber "$(System.PullRequest.PullRequestNumber)" - -Labels $(BreakingChangeLabel) - -AuthToken "$(azuresdk-github-pat)" - - - task: PowerShell@2 - displayName: Remove label from the spec PR - condition: and(eq(variables['Build.Reason'], 'PullRequest'), ne(variables['BreakingChangeLabel'], ''), eq(variables['BreakingChangeLabelAction'], 'remove')) - inputs: - pwsh: true - workingDirectory: $(SdkRepoDirectory) - filePath: $(SdkRepoDirectory)/eng/common/scripts/Remove-IssueLabel.ps1 - arguments: > - -RepoOwner $(SpecRepoOwner) - -RepoName $(SpecRepoName) - -IssueNumber "$(System.PullRequest.PullRequestNumber)" - -LabelName $(BreakingChangeLabel) - -AuthToken "$(azuresdk-github-pat)" - - - ${{ if eq(parameters.SkipPullRequestCreation, false) }}: - - template: /eng/common/pipelines/templates/steps/git-push-changes.yml - parameters: - BaseRepoBranch: $(PrBranch)-$(Build.BuildId) - BaseRepoOwner: azure-sdk - CommitMsg: $(GeneratedSDKInformation) - TargetRepoOwner: $(SdkRepoOwner) - TargetRepoName: $(SdkRepoName) - PushArgs: "--force" - WorkingDirectory: $(SdkRepoDirectory) - ScriptDirectory: $(SdkRepoDirectory)/eng/common/scripts - - - ${{ if not(endsWith(variables['SdkRepoName'], '-pr')) }}: - - task: PowerShell@2 - displayName: Create pull request - condition: and(succeeded(), eq(variables['HasChanges'], 'true'), ne(variables['Build.Reason'], 'PullRequest')) - inputs: - pwsh: true - workingDirectory: $(SdkRepoDirectory) - filePath: $(SdkRepoDirectory)/eng/common/scripts/Submit-PullRequest.ps1 - arguments: > - -RepoOwner "$(SdkRepoOwner)" - -RepoName "$(SdkRepoName)" - -BaseBranch 
"main" - -PROwner "azure-sdk" - -PRBranch "$(PrBranch)-$(Build.BuildId)" - -AuthToken "$(azuresdk-github-pat)" - -PRTitle "$(PrTitle)-generated-from-$(Build.DefinitionName)-$(Build.BuildId)" - -PRBody "$(GeneratedSDKInformation)" - -OpenAsDraft $true - - - ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: - - template: /eng/common/pipelines/templates/steps/detect-api-changes.yml - parameters: - ArtifactPath: $(GeneratedSDK.StagedArtifactsFolder) - ArtifactName: $(PipelineArtifactsName) - RepoRoot: $(SdkRepoDirectory) +stages: +- stage: Build + displayName: 'SDK Generation' + jobs: + - job: + timeoutInMinutes: 2400 + + variables: + - template: /eng/pipelines/templates/variables/image.yml + - name: NodeVersion + value: '22.13.x' + - name: PythonVersion + value: '3.13' + - name: PipelineArtifactsName + value: 'packages' + - name: SpecRepoCommit + value: $(Build.SourceVersion) + + pool: + name: $(LINUXPOOL) + vmImage: $(LINUXVMIMAGE) + os: linux + + steps: + - checkout: none + + - pwsh: | + $tspConfigPathPattern = '^specification\/([^\/]+\/)+tspconfig\.yaml$' + $readmePathPattern = '^specification\/([^\/]+\/){2,}readme\.md$' + if (('${{ parameters.ConfigType }}' -eq 'TypeSpec') -and ('${{ parameters.ConfigPath }}' -notmatch $tspConfigPathPattern)) { + Write-Host "##vso[task.logissue type=error]'ConfigPath' must be a valid 'tspconfig.yaml' file path when 'ConfigType' is set to 'TypeSpec'. For example, 'specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml'" + Exit 1 + } elseif (('${{ parameters.ConfigType }}' -eq 'OpenAPI') -and ('${{ parameters.ConfigPath }}' -notmatch $readmePathPattern)) { + Write-Host "##vso[task.logissue type=error]'ConfigPath' must be a valid 'readme.md' file path when 'ConfigType' is set to 'OpenAPI'. 
For example, 'specification/appplatform/resource-manager/readme.md'" + Exit 1 + } + if (('$(Build.Reason)' -ne 'PullRequest') -and ('${{ parameters.SpecBatchTypes }}' -eq '')) { + if (('${{ parameters.SdkReleaseType }}' -eq 'stable') -and ('${{ parameters.ApiVersion}}' -match 'preview')) { + Write-Host "##vso[task.logissue type=error]'SDK release type' must be set to 'beta' for the preview API specifications." + Exit 1 + } + } + $urlPattern = '^https://github\.com/(?[^/]+)/(?[^/]+)' + if ('${{ parameters.SpecRepoUrl }}' -match $urlPattern) { + $specRepoOwner = $Matches['organization'] + Write-Host "##vso[task.setvariable variable=SpecRepoOwner]$specRepoOwner" + Write-Host "SpecRepoOwner variable set to: $specRepoOwner" + + $specRepoName = $Matches['repository'] + Write-Host "##vso[task.setvariable variable=SpecRepoName]$specRepoName" + Write-Host "SpecRepoName variable set to: $specRepoName" + + $specRepoDirectory = "$(System.DefaultWorkingDirectory)/$specRepoName" + Write-Host "##vso[task.setvariable variable=SpecRepoDirectory]$specRepoDirectory" + Write-Host "SpecRepoDirectory variable set to: $specRepoDirectory" + } + + if ('${{ parameters.SdkRepoUrl }}' -match $urlPattern) { + if ('${{ parameters.SpecRepoUrl }}'.EndsWith('-pr') -and (-not '${{ parameters.SdkRepoUrl }}'.EndsWith('-pr'))) { + Write-Host "##vso[task.logissue type=error]SdkRepoUrl must be a private repository if SpecRepoUrl is a private repository." 
+ Exit 1 + } + + $sdkRepoOwner = $Matches['organization'] + Write-Host "##vso[task.setvariable variable=SdkRepoOwner]$sdkRepoOwner" + Write-Host "SdkRepoOwner variable set to: $sdkRepoOwner" + + $sdkRepoName = $Matches['repository'] + Write-Host "##vso[task.setvariable variable=SdkRepoName]$sdkRepoName" + Write-Host "SdkRepoName variable set to: $sdkRepoName" + + $sdkRepoDirectory = "$(System.DefaultWorkingDirectory)/$sdkRepoName" + Write-Host "##vso[task.setvariable variable=SdkRepoDirectory]$sdkRepoDirectory" + Write-Host "SdkRepoDirectory variable set to: $sdkRepoDirectory" + } + + if ([string]::IsNullOrEmpty($SpecRepoOwner) -or [string]::IsNullOrEmpty($SpecRepoName) -or [string]::IsNullOrEmpty($SdkRepoOwner) -or [string]::IsNullOrEmpty($SdkRepoName)) { + Write-Host "##vso[task.logissue type=error]One or more required variables is empty or invalid. Ensure that SpecRepoUrl and SdkRepoUrl are set to valid GitHub repository URLs." + Exit 1 + } + + displayName: "Create Run Time Variables" + + - template: /eng/common/pipelines/templates/steps/sparse-checkout.yml + parameters: + Paths: + - '/*' + - '!sdk/**/test-recordings/*' + - '!sdk/**/recordings/*' + - '!sdk/**/SessionRecords/*' + - '!sdk/**/session-records/*' + Repositories: + - Name: $(SpecRepoOwner)/$(SpecRepoName) + Commitish: $(SpecRepoCommit) + WorkingDirectory: $(SpecRepoDirectory) + - Name: $(SdkRepoOwner)/$(SdkRepoName) + Commitish: ${{ parameters.SdkRepoCommit }} + WorkingDirectory: $(SdkRepoDirectory) + SkipCheckoutNone: true + ${{ if and(eq(variables['System.TeamProject'], 'internal'), endsWith(variables['Build.Repository.Name'], '-pr')) }}: + TokenToUseForAuth: $(azuresdk-github-pat) + PreserveAuthToken: true + + - task: NodeTool@0 + inputs: + versionSpec: $(NodeVersion) + displayName: 'Install Node.js' + + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PythonVersion) + + - script: | + optional_params="" + sdk_gen_info="Configurations: " + + if [ "$(Build.Reason)" != "PullRequest" ] && [ "${{ 
parameters.ConfigType }}" = "TypeSpec" ]; then + optional_params="$optional_params --tsp-config-relative-path ${{ parameters.ConfigPath }}" + sdk_gen_info="$sdk_gen_info '${{ parameters.ConfigPath }}'," + elif [ "$(Build.Reason)" != "PullRequest" ] && [ "${{ parameters.ConfigType }}" = "OpenAPI" ]; then + optional_params="$optional_params --readme-relative-path ${{ parameters.ConfigPath }}" + sdk_gen_info="$sdk_gen_info '${{ parameters.ConfigPath }}'," + fi + + updatedSpecRepoCommit="$(SpecRepoCommit)" + if [ "$(Build.Reason)" = "PullRequest" ]; then + optional_params="$optional_params --pr-number $(System.PullRequest.PullRequestNumber)" + specPrUrl="${{ parameters.SpecRepoUrl }}/pull/$(System.PullRequest.PullRequestNumber)" + sdk_gen_info="$sdk_gen_info spec PR: $specPrUrl" + updatedSpecRepoCommit="$(System.PullRequest.SourceCommitId)" + fi + + if [ "${{ parameters.SpecBatchTypes }}" != "" ]; then + optional_params="$optional_params --batch-type ${{ parameters.SpecBatchTypes }}" + sdk_gen_info="$sdk_gen_info SpecBatchTypes: ${{ parameters.SpecBatchTypes }}," + fi + + if [ "${{ parameters.ApiVersion }}" != "" ]; then + optional_params="$optional_params --api-version ${{ parameters.ApiVersion }} --sdk-release-type ${{ parameters.SdkReleaseType }}" + sdk_gen_info="$sdk_gen_info API Version: ${{ parameters.ApiVersion }}, SDK Release Type: ${{ parameters.SdkReleaseType }}," + fi + sdk_gen_info="$sdk_gen_info and CommitSHA: '$updatedSpecRepoCommit' in SpecRepo: '${{ parameters.SpecRepoUrl }}'" + echo "##vso[task.setvariable variable=GeneratedSDKInformation]$sdk_gen_info" + echo "$sdk_gen_info" + + cd $(SpecRepoDirectory) + echo "##[group]Run npm ci" + npm ci + echo "##[endgroup]" + node $(SpecRepoDirectory)/eng/tools/spec-gen-sdk-runner/cmd/spec-gen-sdk-runner.js \ + --scp $(SpecRepoDirectory) \ + --sdp $(SdkRepoDirectory) \ + --wf $(System.DefaultWorkingDirectory) \ + --lang $(SdkRepoName) \ + --commit $updatedSpecRepoCommit \ + --spec-repo-url ${{ 
parameters.SpecRepoUrl }} \ + $optional_params || true # return true always to suppress exit error logging + displayName: 'Generate SDK' + + - template: /eng/common/pipelines/templates/steps/publish-artifact.yml + parameters: + ArtifactName: $(SpecGenSdkArtifactName) + ArtifactPath: "$(System.DefaultWorkingDirectory)/$(SpecGenSdkArtifactPath)" + CustomCondition: and(succeededOrFailed(), ne(variables['SpecGenSdkArtifactName'], '')) + + - template: /eng/common/pipelines/templates/steps/publish-artifact.yml + parameters: + ArtifactName: $(PipelineArtifactsName) + ArtifactPath: $(StagedArtifactsFolder) + CustomCondition: and(succeededOrFailed(), ne(variables['StagedArtifactsFolder'], '')) + + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), eq(parameters.SkipPullRequestCreation, false), ne(variables['Build.Reason'], 'PullRequest')) }}: + - template: /eng/common/pipelines/templates/steps/git-push-changes.yml + parameters: + BaseRepoBranch: $(PrBranch)-$(Build.BuildId) + BaseRepoOwner: azure-sdk + CommitMsg: $(GeneratedSDKInformation) + TargetRepoOwner: $(SdkRepoOwner) + TargetRepoName: $(SdkRepoName) + PushArgs: "--force" + WorkingDirectory: $(SdkRepoDirectory) + ScriptDirectory: $(SdkRepoDirectory)/eng/common/scripts + + - task: PowerShell@2 + displayName: Create pull request + condition: and(succeeded(), eq(variables['HasChanges'], 'true'), ne(variables['Build.Reason'], 'PullRequest'), not(endsWith(variables['SdkRepoName'], '-pr'))) + inputs: + pwsh: true + workingDirectory: $(SdkRepoDirectory) + filePath: $(SdkRepoDirectory)/eng/common/scripts/Submit-PullRequest.ps1 + arguments: > + -RepoOwner "$(SdkRepoOwner)" + -RepoName "$(SdkRepoName)" + -BaseBranch "main" + -PROwner "azure-sdk" + -PRBranch "$(PrBranch)-$(Build.BuildId)" + -AuthToken "$(azuresdk-github-pat)" + -PRTitle "$(PrTitle)-generated-from-$(Build.DefinitionName)-$(Build.BuildId)" + -PRBody "$(GeneratedSDKInformation)" + -OpenAsDraft $true + + - ${{ if eq(variables['Build.Reason'], 
'PullRequest') }}: + - pwsh: | + . $(SpecRepoDirectory)/eng/common/scripts/Helpers/ApiView-Helpers.ps1 + Create-API-Review ` + -specGenSDKArtifactPath "$(System.DefaultWorkingDirectory)/$(SpecGenSdkArtifactPath)/$(SpecGenSdkArtifactName).json" ` + -apiviewArtifactName "$(PipelineArtifactsName)" ` + -buildId "$(Build.BuildId)" ` + -commitish "$(System.PullRequest.SourceCommitId)" ` + -repoName "$(SpecRepoOwner)/$(SpecRepoName)" ` + -pullRequestNumber "$(System.PullRequest.PullRequestNumber)" + displayName: 'Detect API Change and Create API Review' + condition: and(succeeded(), eq(variables['HasAPIViewArtifact'], 'true')) + + - template: /eng/common/pipelines/templates/steps/publish-artifact.yml + parameters: + ArtifactName: "spec-gen-sdk-logs" + ArtifactPath: "$(System.DefaultWorkingDirectory)/out/logs" + CustomCondition: succeededOrFailed() diff --git a/eng/pipelines/templates/variables/image.yml b/eng/pipelines/templates/variables/image.yml index 1403337835c3..52d5c59f3c8e 100644 --- a/eng/pipelines/templates/variables/image.yml +++ b/eng/pipelines/templates/variables/image.yml @@ -11,9 +11,9 @@ variables: value: Azure Pipelines - name: LINUXVMIMAGE - value: ubuntu-20.04 + value: ubuntu-24.04 - name: LINUXNEXTVMIMAGE - value: ubuntu-22.04 + value: ubuntu-24.04 - name: WINDOWSVMIMAGE value: windows-2022 - name: MACVMIMAGE diff --git a/eng/pipelines/typespec-apiview.yml b/eng/pipelines/typespec-apiview.yml index 43029112bb9f..f7333bd0e411 100644 --- a/eng/pipelines/typespec-apiview.yml +++ b/eng/pipelines/typespec-apiview.yml @@ -7,14 +7,14 @@ parameters: default: 'typeSpecAPIViewArtifacts' - name: APIViewAPIUri type: string - default: 'https://apiview.dev/PullRequest/DetectAPIChanges' + default: 'https://apiview.dev/api/PullRequests/CreateAPIRevisionIfAPIHasChanges' # Please use 'https://apiviewstagingtest.com/PullRequest/DetectAPIChanges' for testing purposes jobs: - job: pool: name: azsdk-pool - demands: ImageOverride -equals ubuntu-22.04 + demands: ImageOverride 
-equals ubuntu-24.04 variables: - template: /eng/pipelines/templates/variables/globals.yml @@ -53,6 +53,6 @@ jobs: -RepoName $(Build.Repository.Name) ` -PullRequestNumber $(System.PullRequest.PullRequestNumber)` -Language 'TypeSpec' ` - -CommitSha $(Build.SourceVersion) + -CommitSha $(System.PullRequest.SourceCommitId) displayName: Create TypeSpec APIView condition: and(succeeded(), ne(variables['Agent.JobStatus'], 'SucceededWithIssues')) diff --git a/eng/scripts/ChangedFiles-Functions.ps1 b/eng/scripts/ChangedFiles-Functions.ps1 index 9fda993978fa..df12b1c5f696 100644 --- a/eng/scripts/ChangedFiles-Functions.ps1 +++ b/eng/scripts/ChangedFiles-Functions.ps1 @@ -58,7 +58,7 @@ function Get-ChangedCoreFiles($changedFiles = (Get-ChangedFiles)) { $coreFiles = $changedFiles.Where({ $_.StartsWith(".github/") -or - $_.StartsWith("eng/") -or + ($_.StartsWith("eng/") -and !$_.StartsWith("eng/common/")) -or $_.StartsWith("specification/common-types/") -or $_ -in $rootFiles }) diff --git a/eng/scripts/Copy-ApiVersion-Functions.ps1 b/eng/scripts/Copy-ApiVersion-Functions.ps1 index 5b72003d73fd..f920d50a9ac3 100644 --- a/eng/scripts/Copy-ApiVersion-Functions.ps1 +++ b/eng/scripts/Copy-ApiVersion-Functions.ps1 @@ -29,6 +29,7 @@ function Get-ReadmeWithNewTag($readmeContent, $tagContent) { return $readmeContent -replace '(?s)(### tag: package.*)', "$tagContent`n`$1" } + function Get-ReadmeWithLatestTag($readmeContent, $newApiVersion, $newApiVersionStatus ) { # Get the current tag date $currentTag = $readmeContent -match '(?m)^(tag:\s*)(package-)(.*)(?(?\d{4}-\d{2}(-\d{2})?)|(?\d+\.\d+))' @@ -54,7 +55,10 @@ function Get-ReadmeWithLatestTag($readmeContent, $newApiVersion, $newApiVersionS } } - $tagVersion = $newApiVersion -match '(?(?\d{4}-\d{2}(-\d{2})?)|(?\d+\.\d+)(-preview(\.\d+)?)?)' + # Extract the new API version, supporting two formats: + # - Date format: YYYY-MM(-DD)?(-preview)? e.g. 2024-01, 2024-01-01, 2024-01-preview + # - Semantic versioning: X.Y(-preview)? 
or X.Y(-preview.\d+)? e.g. 7.6, 7.6-preview, 7.6-preview.1 + $tagVersion = $newApiVersion -match '(?(?\d{4}-\d{2}(-\d{2})?)(-preview)?|(?\d+\.\d+(-preview(\.\d+)?)?))' $tagVersion = $Matches['apiVersion'] if ($newApiVersionStatus -eq "preview") { $tagVersion = "preview-" + $tagVersion diff --git a/eng/scripts/Copy-ApiVersion.ps1 b/eng/scripts/Copy-ApiVersion.ps1 index da91cc15fc8f..d8c2551c59ab 100644 --- a/eng/scripts/Copy-ApiVersion.ps1 +++ b/eng/scripts/Copy-ApiVersion.ps1 @@ -107,7 +107,7 @@ This allows reviewers to easily diff subsequent changes against the previous spe foreach ($file in Get-ChildItem $newDirectory -File -Recurse) { Write-Verbose "Replacing any API versions in $file" $content = $file | Get-Content -Raw - $content -replace $oldApiVersion, $newApiVersion | Set-Content $file.FullName + $content -replace $oldApiVersion, $newApiVersion | Set-Content $file.FullName -NoNewline } # Commit just the version changes. diff --git a/eng/scripts/Create-APIView.ps1 b/eng/scripts/Create-APIView.ps1 index 9905ea14964a..e855e93765e6 100644 --- a/eng/scripts/Create-APIView.ps1 +++ b/eng/scripts/Create-APIView.ps1 @@ -310,6 +310,12 @@ function New-SwaggerAPIViewTokens { } } + if ($autoRestConfigInfo.Count -eq 0) { + LogWarning " No AutoRest configuration found for the changed swagger files in the current PR..." + Write-Host "##vso[task.complete result=SucceededWithIssues;]DONE" + exit 0 + } + LogGroupStart " Swagger APIView Tokens will be generated for the following configuration files..." $autoRestConfigInfo.GetEnumerator() | ForEach-Object { LogInfo " - $($_.Key)" @@ -355,9 +361,15 @@ function New-SwaggerAPIViewTokens { } git checkout $currentBranch + $generatedSwaggerArtifacts = Get-ChildItem -Path $swaggerAPIViewArtifactsDirectory -Recurse + if ($generatedSwaggerArtifacts.Count -eq 0) { + LogWarning " No Swagger APIView Tokens generated..." 
+ Write-Host "##vso[task.complete result=SucceededWithIssues;]DONE" + exit 0 + } LogGroupStart " See all generated Swagger APIView Artifacts..." - Get-ChildItem -Path $swaggerAPIViewArtifactsDirectory -Recurse + $generatedSwaggerArtifacts LogGroupEnd } @@ -550,16 +562,24 @@ function New-RestSpecsAPIViewReviews { $query.Add('pullRequestNumber', $PullRequestNumber) $query.Add('packageName', $_.BaseName) $query.Add('language', $Language) - $query.Add('commentOnPR', $true) + $query.Add('commentOnPR', $false) $uri = [System.UriBuilder]$APIViewUri $uri.Query = $query.ToString() + $correlationId = [System.Guid]::NewGuid().ToString() + $headers = @{ + "x-correlation-id" = $correlationId + } + LogInfo "Create APIView for resource provider '$($_.BaseName)'" - LogInfo "APIView Uri: $($uri.Uri)" + LogInfo "Request URI: $($uri.Uri.OriginalString)" + LogInfo "Correlation ID: $correlationId" try { - Invoke-WebRequest -Method 'GET' -Uri $uri.Uri -MaximumRetryCount 3 + $Response = Invoke-WebRequest -Method 'GET' -Uri $uri.Uri -Headers $headers -MaximumRetryCount 3 + $responseContent = $Response.Content | ConvertFrom-Json | ConvertTo-Json -Depth 10 + LogSuccess $responseContent } catch { LogError "Failed to create APIView for resource provider '$($_.BaseName)'. 
Error: $($_.Exception.Response)" diff --git a/eng/scripts/Tests/Copy-ApiVersion.Tests.ps1 b/eng/scripts/Tests/Copy-ApiVersion.Tests.ps1 index a560cafc76f3..d45429ad988a 100644 --- a/eng/scripts/Tests/Copy-ApiVersion.Tests.ps1 +++ b/eng/scripts/Tests/Copy-ApiVersion.Tests.ps1 @@ -52,10 +52,35 @@ Describe "Copy-ApiVersion regex tests" { apiVersion = "2024-01-01" versionStatus = "stable" }, + @{ + inputReadme = '..\..\..\..\specification\compute\resource-manager\readme.md' + apiVersion = "2025-02" + versionStatus = "preview" + }, + @{ + inputReadme = '..\..\..\..\specification\compute\resource-manager\readme.md' + apiVersion = "2025-03-01" + versionStatus = "preview" + }, + @{ + inputReadme = '..\..\..\..\specification\securityinsights\resource-manager\readme.md' + apiVersion = "7.6" + versionStatus = "preview" + }, + @{ + inputReadme = '..\..\..\..\specification\keyvault\data-plane\readme.md' + apiVersion = "7.6-preview" + versionStatus = "preview" + }, @{ inputReadme = '..\..\..\..\specification\keyvault\data-plane\readme.md' apiVersion = "7.6-preview.1" versionStatus = "preview" + }, + @{ + inputReadme = '..\..\..\..\specification\securityinsights\resource-manager\readme.md' + apiVersion = "2025-05-01-preview" + versionStatus = "preview" } ) { param($inputReadme, $apiVersion, $versionStatus) diff --git a/eng/scripts/TypeSpec-Generate-Sdk.ps1 b/eng/scripts/TypeSpec-Generate-Sdk.ps1 deleted file mode 100644 index 616a24ae4c44..000000000000 --- a/eng/scripts/TypeSpec-Generate-Sdk.ps1 +++ /dev/null @@ -1,105 +0,0 @@ -### -# Conventient usage: -# 1) generate specific language sdk based on current typespec folder -# ./TypeSpec-Generate-Sdk.ps1 -SdkLanguage {language} -# e.g. ./TypeSpec-Generate-Sdk.ps1 -SdkLanguage dotnet -# The pre-requisite is the sdk repos path in local machine follows below convention: -# 1). "azure-rest-api-specs" and "sdk-repos" are peer folder under same parent folder -# 2). each sdk language repo is under "sdk-repos" folder, i.e. 
-# sdk-repos/azure-sdk-for-net -# sdk-repos/azure-sdk-for-java -# sdk-repos/azure-sdk-for-python -# sdk-repos/azure-sdk-for-js -### - -[CmdletBinding()] -param ( - [Parameter(Position = 0)] - [ValidateNotNullOrEmpty()] - [string] $SdkRepoRootDirectory, - [Parameter(Position = 1)] - [string] $TypeSpecProjectDirectory = ".", # A directory of `tspconfig.yaml` or a remoteUrl of `tspconfig.yaml` - [Parameter(Position = 2)] - [string] $CommitHash, - [Parameter(Position = 3)] - [string] $RepoUrl, - [string] $SdkLanguage -) - -$TypeSpecProjectDirectory = (Resolve-Path $TypeSpecProjectDirectory).Path - -if ($SdkLanguage) { - # example value of TypeSpecProjectDirectory: /workspaces/azure-rest-api-specs/specification/contosowidgetmanager/Contoso.WidgetManager - $index = $TypeSpecProjectDirectory.IndexOf("specification") - if ($index -eq -1) { - Write-Error "The input TypeSpecProjectDirectory parameter doesn't have 'specification' folder in its path: $TypeSpecProjectDirectory" - exit 1 - } - $specFolderPath = $TypeSpecProjectDirectory.Substring(0, $index - 1) - $rootPath = Split-Path $specFolderPath -Parent - $sdkRepoRoot = Join-Path $rootPath "sdk-repos" - if (!(Test-Path $sdkRepoRoot)) { - Write-Error "sdk repos root folder doesn't exist: $sdkRepoRoot" - exit 1 - } - - # trying to locate the default sdk repo folder under 'sdk-repos' folder by language value - switch ($SdkLanguage) { - "dotnet" { - Write-Host "Generating dotnet sdk code ..." - $sdkRepoPath = Join-Path $sdkRepoRoot "azure-sdk-for-net" - if (!(Test-Path $sdkRepoPath)) { - Write-Error "sdk repo doesn't exist: $sdkRepoPath" - exit 1 - } - } - "java" { - Write-Host "Generating java sdk code ..." - $sdkRepoPath = Join-Path $sdkRepoRoot "azure-sdk-for-java" - if (!(Test-Path $sdkRepoPath)) { - Write-Error "sdk repo doesn't exist: $sdkRepoPath" - exit 1 - } - } - "python" { - Write-Host "Generating python sdk code ..." 
- $sdkRepoPath = Join-Path $sdkRepoRoot "azure-sdk-for-python" - if (!(Test-Path $sdkRepoPath)) { - Write-Error "sdk repo doesn't exist: $sdkRepoPath" - exit 1 - } - } - "js" { - Write-Host "Generating js sdk code ..." - $sdkRepoPath = Join-Path $sdkRepoRoot "azure-sdk-for-js" - if (!(Test-Path $sdkRepoPath)) { - Write-Error "sdk repo doesn't exist: $sdkRepoPath" - exit 1 - } - } - default { - Write-Error "The input SdkLanguage parameter should be one of this values: dotnet, java, python, js" - exit 1 - } - } - $SdkRepoRootDirectory = $sdkRepoPath -} - -try { - Push-Location $SdkRepoRootDirectory - $commonScript = Join-Path . "eng/common/scripts/TypeSpec-Project-Process.ps1" - if (Test-Path $commonScript) { - . $commonScript -TypeSpecProjectDirectory $TypeSpecProjectDirectory -CommitHash $CommitHash -RepoUrl $RepoUrl - if ($LASTEXITCODE) { - exit $LASTEXITCODE - } - } - else { - Write-Error "Cannot find $commonScript at $SdkRepoRootDirectory" - } -} -finally { - Pop-Location -} - -exit 0 \ No newline at end of file diff --git a/eng/scripts/TypeSpec-Validation.ps1 b/eng/scripts/TypeSpec-Validation.ps1 index 0ea5255aa788..84d95d468cf1 100644 --- a/eng/scripts/TypeSpec-Validation.ps1 +++ b/eng/scripts/TypeSpec-Validation.ps1 @@ -1,5 +1,6 @@ [CmdletBinding()] param ( + [switch]$IgnoreCoreFiles = $false, [switch]$CheckAll = $false, [int]$Shard = 0, [int]$TotalShards = 1, @@ -17,10 +18,11 @@ if ($TotalShards -gt 0 -and $Shard -ge $TotalShards) { . $PSScriptRoot/Suppressions-Functions.ps1 . 
$PSScriptRoot/Array-Functions.ps1 -$typespecFolders, $checkedAll = &"$PSScriptRoot/Get-TypeSpec-Folders.ps1" ` +$typespecFolders, $checkingAllSpecs = &"$PSScriptRoot/Get-TypeSpec-Folders.ps1" ` -BaseCommitish:$BaseCommitish ` -HeadCommitish:$HeadCommitish ` - -CheckAll:$CheckAll + -CheckAll:$CheckAll ` + -IgnoreCoreFiles:$IgnoreCoreFiles if ($TotalShards -gt 1 -and $TotalShards -le $typespecFolders.Count) { $typespecFolders = shardArray $typespecFolders $Shard $TotalShards @@ -33,11 +35,11 @@ foreach ($typespecFolder in $typespecFolders) { $typespecFoldersWithFailures = @() if ($typespecFolders) { - $typespecFolders = $typespecFolders.Split('',[System.StringSplitOptions]::RemoveEmptyEntries) + $typespecFolders = $typespecFolders.Split('', [System.StringSplitOptions]::RemoveEmptyEntries) foreach ($typespecFolder in $typespecFolders) { LogGroupStart "Validating $typespecFolder" - if ($checkedAll) { + if ($checkingAllSpecs) { $suppression = Get-Suppression "TypeSpecValidationAll" $typespecFolder if ($suppression) { $reason = $suppression["reason"] ?? "" @@ -47,14 +49,17 @@ if ($typespecFolders) { } } - LogInfo "npm exec --no -- tsv $typespecFolder" + # Example: '{"checkingAllSpecs"=true}' + $context = @{ checkingAllSpecs = $checkingAllSpecs } | ConvertTo-Json -Compress + + LogInfo "npm exec --no -- tsv $typespecFolder ""$context""" if ($DryRun) { LogGroupEnd continue } - npm exec --no -- tsv $typespecFolder 2>&1 | Write-Host + npm exec --no -- tsv $typespecFolder "$context" 2>&1 | Write-Host if ($LASTEXITCODE) { $typespecFoldersWithFailures += $typespecFolder $errorString = "TypeSpec Validation failed for project $typespecFolder run the following command locally to validate." 
@@ -69,7 +74,8 @@ if ($typespecFolders) { } LogGroupEnd } -} else { +} +else { if ($CheckAll) { LogError "TypeSpec Validation - All did not validate any specs" LogJobFailure diff --git a/eng/tools/.prettierignore b/eng/tools/.prettierignore new file mode 100644 index 000000000000..b37c5cc2fa28 --- /dev/null +++ b/eng/tools/.prettierignore @@ -0,0 +1,9 @@ +# this file +.prettierignore + +# build artifacts +dist + +# specs in test folders +fixtures +specification diff --git a/eng/tools/.prettierrc.yaml b/eng/tools/.prettierrc.yaml new file mode 100644 index 000000000000..a851a86e34a8 --- /dev/null +++ b/eng/tools/.prettierrc.yaml @@ -0,0 +1,12 @@ +# .prettierrc.yaml + +# Aligned with microsoft/typespec +printWidth: 100 + +overrides: + # tsconfig.json is actually parsed as JSONC + - files: + - tsconfig.json + - tsconfig.*.json + options: + parser: jsonc diff --git a/eng/tools/lint-diff/README.md b/eng/tools/lint-diff/README.md index cd7e80add688..283a6f62127c 100644 --- a/eng/tools/lint-diff/README.md +++ b/eng/tools/lint-diff/README.md @@ -1,8 +1,9 @@ # TODO: REVERIFY THESE DOCS BEFORE MERGE + # LintDiff LintDiff looks at files changed in a commit, runs `autorest` linting over those -files, and looks for "new" errors in the results. +files, and looks for "new" errors in the results. Altered files examined include `readme.md` files and swagger `.json` files. In the case of `readme.md` files the tool will examine tags and other files that @@ -11,13 +12,13 @@ directory structure). ## Run Locally -1. Setup `before` - Clone the specs repo at the *base commit* for the changes you want to test. (generally cloned to main and in a temp location like `/tmp/rest-api-specs-before`) +1. Setup `before` - Clone the specs repo at the _base commit_ for the changes you want to test. (generally cloned to main and in a temp location like `/tmp/rest-api-specs-before`) 1. Setup `after` - Clone the specs repo at the commit for the changes you want to test. -1. 
Build a list of changed files from `after` generally by running `git diff --name-only > changed-files.txt` -1. From the root of the repo run `npm i` to install dependencies. +1. Build a list of changed files from `after` generally by running `git diff --name-only > changed-files.txt` +1. From the root of the repo run `npm i` to install dependencies. 1. Run `npm exec --no -- lint-diff --before --after --changed-files ` -### Example +### Example In most common cases, changes to LintDiff are in the current state of the repo and you'll need to set up a `before` to run against. diff --git a/eng/tools/lint-diff/cmd/lint-diff.js b/eng/tools/lint-diff/cmd/lint-diff.js index b97bfdd34c7d..b349cbe5ab8d 100755 --- a/eng/tools/lint-diff/cmd/lint-diff.js +++ b/eng/tools/lint-diff/cmd/lint-diff.js @@ -1,5 +1,5 @@ #!/usr/bin/env node -import { main } from "../dist/eng/tools/lint-diff/src/lint-diff.js"; +import { main } from "../dist/src/lint-diff.js"; await main(); diff --git a/eng/tools/lint-diff/package.json b/eng/tools/lint-diff/package.json index e45a9cee662e..e68134ac33ad 100644 --- a/eng/tools/lint-diff/package.json +++ b/eng/tools/lint-diff/package.json @@ -8,31 +8,34 @@ }, "scripts": { "build": "tsc --build", + "clean": "rm -rf dist && rm -rf node_modules", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", "test": "vitest", - "test:ci": "vitest run --coverage --reporter=verbose", - "clean": "rm -rf dist && rm -rf node_modules" + "test:ci": "vitest run --coverage --reporter=verbose" }, "engines": { "node": ">= 20.0.0" }, "dependencies": { - "@apidevtools/json-schema-ref-parser": "^9.0.9", - "@azure-tools/openapi-tools-common": "^1.2.2", - "@azure/openapi-markdown": "^0.9.4", - "@microsoft.azure/openapi-validator": "2.2.4", - "@types/js-yaml": "^3.12.10", - "autorest": "3.6.1", + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "@azure-tools/specs-shared": "file:../../../.github/shared", + "@microsoft.azure/openapi-validator": "^2.2.4", + "autorest": "^3.7.2", "axios": "^1.8.3", "change-case": "^5.4.4", - "commonmark": "^0.31.2", - "js-yaml": "^3.14.1" + "deep-eql": "^5.0.2", + "marked": "^16.0.0" }, "devDependencies": { + "@types/deep-eql": "^4.0.2", "@types/node": "^18.19.31", "@vitest/coverage-v8": "^3.0.2", "execa": "^9.5.2", "memfs": "^4.17.0", - "typescript": "~5.6.2", + "prettier": "~3.5.3", + "typescript": "~5.8.2", "vitest": "^3.0.2" } } diff --git a/eng/tools/lint-diff/src/correlateResults.ts b/eng/tools/lint-diff/src/correlateResults.ts new file mode 100644 index 000000000000..3d327e75c08b --- /dev/null +++ b/eng/tools/lint-diff/src/correlateResults.ts @@ -0,0 +1,226 @@ +import { basename, join, relative } from "path"; +import { relativizePath, pathExists, isFailure, isWarning } from "./util.js"; +import { AutorestRunResult, BeforeAfter, LintDiffViolation, Source } from "./lintdiff-types.js"; +import { getDefaultTag } from "./markdown-utils.js"; +import { Readme } from "@azure-tools/specs-shared/readme"; + +export async function correlateRuns( + beforePath: string, + beforeChecks: AutorestRunResult[], + afterChecks: AutorestRunResult[], +): Promise> { + const runCorrelations = new Map(); + console.log("\nCorrelating runs..."); + for (const results of afterChecks) { + const { rootPath, readme, 
tag } = results; + const readmePathRelative = relative(rootPath, readme.path); + + const key = tag ? `${readmePathRelative}#${tag}` : readmePathRelative; + if (runCorrelations.has(key)) { + throw new Error(`Duplicate key found correlating autorest runs: ${key}`); + } + + // Look for candidates matching readme and tag + const beforeCandidates = beforeChecks.filter((r) => { + return relative(beforePath, r.readme.path) === readmePathRelative && r.tag === tag; + }); + if (beforeCandidates.length === 1) { + runCorrelations.set(key, { + before: beforeCandidates[0], + after: results, + }); + continue; + } else if (beforeCandidates.length > 1) { + throw new Error(`Multiple before candidates found for key ${key}`); + } + + // Look for candidates with a matching default tag from the baseline + const beforeReadmePath = join(beforePath, readmePathRelative); + if (await pathExists(beforeReadmePath)) { + const beforeReadme = new Readme(beforeReadmePath); + const defaultTag = await getDefaultTag(beforeReadme); + if (!defaultTag) { + throw new Error(`No default tag found for readme ${readme} in before state`); + } + const beforeDefaultTagCandidates = beforeChecks.filter( + (r) => relative(beforePath, r.readme.path) === readmePathRelative && r.tag === defaultTag, + ); + + if (beforeDefaultTagCandidates.length === 1) { + runCorrelations.set(key, { + before: beforeDefaultTagCandidates[0], + after: results, + }); + continue; + } else if (beforeDefaultTagCandidates.length > 1) { + throw new Error( + `Multiple before candidates found for key ${key} using default tag ${defaultTag}`, + ); + } + + // Look for candidates matching just the readme file + const beforeReadmeCandidate = beforeChecks.filter( + (r) => relative(beforePath, r.readme.path) === readmePathRelative, + ); + if (beforeReadmeCandidate.length === 1) { + runCorrelations.set(key, { + before: beforeReadmeCandidate[0], + after: results, + }); + continue; + } else if (beforeReadmeCandidate.length > 1) { + throw new 
Error(`Multiple before candidates found for key ${key} using readme ${readme}`); + } + } + + console.log(`No before candidates found for key ${key}, using no baseline`); + runCorrelations.set(key, { + before: null, + after: results, + }); + } + + return runCorrelations; +} + +export function getViolations( + runCorrelations: Map, + affectedSwaggers: Set, +) { + const newViolations: LintDiffViolation[] = []; + const existingViolations: LintDiffViolation[] = []; + + for (const [, { before, after }] of runCorrelations.entries()) { + const beforeViolations = before + ? getLintDiffViolations(before).filter( + (v) => (isFailure(v.level) || isWarning(v.level)) && v.source?.length > 0, + ) + : []; + const afterViolations = getLintDiffViolations(after).filter( + (v) => + // Fatal errors are always new + v.level.toLowerCase() === "fatal" || + ((isFailure(v.level) || isWarning(v.level)) && + v.source?.length > 0 && + affectedSwaggers.has(relativizePath(v.source[0].document).slice(1))), + ); + + const [newItems, existingItems] = getNewItems(beforeViolations, afterViolations); + + const beforeReadmePath = before ? relative(before?.rootPath, before?.readme.path) : ""; + const afterReadmePath = relative(after.rootPath, after.readme.path); + + console.log("Correlation:"); + console.log(`\tBefore: Readme: ${beforeReadmePath} Tag: ${before?.tag}`); + console.log(`\tAfter: Readme : ${afterReadmePath} Tag: ${after.tag}`); + + newViolations.push(...newItems); + existingViolations.push(...existingItems); + } + + return [newViolations, existingViolations]; +} + +// Ensure that diagnostic information emitted by azure-openapi-validator +// to stdout is not interpreted as a diff occurrence. +// Assume that all messages emitted as "information" or "debug" or "verbose" are +// diagnostic output and hence can be excluded, and that all diagnostic output +// is emitted as "information" or "debug" or "verbose", so we don't have to +// exclude anything beyond these levels. 
+const diagnosticLevelStrings: string[] = [ + '"level":"information"', + '"level":"debug"', + '"level":"verbose"', +]; + +export function getLintDiffViolations(runResult: AutorestRunResult): LintDiffViolation[] { + const violations: LintDiffViolation[] = []; + + for (const line of (runResult.stderr + runResult.stdout).split("\n")) { + // TODO: Check for fatal autorest run errors and surface those + + if (!line.includes('"extensionName":"@microsoft.azure/openapi-validator"')) { + continue; + } + + // Ignore all lines that are "diagnostic" output from autorest + if (diagnosticLevelStrings.some((level) => line.includes(level))) { + continue; + } + + const result = JSON.parse(line.trim()); + if (result.code == undefined) { + // Results without a code can be assumed to be fatal errors. Set the code + // to "FATAL" + result.code = "FATAL"; + } + violations.push(result as LintDiffViolation); + } + + return violations; +} + +// This logic is duplicated from momentOfTruthPostProcessing.ts:140 +export function getNewItems( + before: LintDiffViolation[], + after: LintDiffViolation[], +): [LintDiffViolation[], LintDiffViolation[]] { + const newItems = []; + const existingItems = []; + + for (const afterViolation of after) { + let errorIsNew = true; + + // Always treat fatal errors as new + if (afterViolation.level.toLowerCase() === "fatal") { + newItems.push(afterViolation); + continue; + } + + // Search through "before" to find a matching violation + for (const beforeViolation of before) { + if ( + beforeViolation.level == afterViolation.level && + beforeViolation.code == afterViolation.code && + beforeViolation.message == afterViolation.message && + beforeViolation.source?.length == afterViolation.source?.length && + beforeViolation.source?.length && + afterViolation.source?.length && + isSameSources(beforeViolation.source, afterViolation.source) && + arrayIsEqual(beforeViolation.details?.jsonpath, afterViolation.details?.jsonpath) + ) { + errorIsNew = false; + 
existingItems.push(afterViolation); + + // Only need to find one match + break; + } + } + + // If no match is found, add to new + if (errorIsNew) { + newItems.push(afterViolation); + } + } + + return [newItems, existingItems]; +} + +export function isSameSources(a: Source[], b: Source[]) { + if (a?.length && b?.length) { + return basename(a?.[0]?.document) === basename(b?.[0]?.document); + } + return true; +} + +export function arrayIsEqual(a: any[], b: any[]) { + if (a.length !== b.length) { + return false; + } + for (let i = 0; i < a.length; i++) { + if (a[i] !== b[i]) { + return false; + } + } + return true; +} diff --git a/eng/tools/lint-diff/src/generateReport.ts b/eng/tools/lint-diff/src/generateReport.ts index e23d563634d8..62f0019650e1 100644 --- a/eng/tools/lint-diff/src/generateReport.ts +++ b/eng/tools/lint-diff/src/generateReport.ts @@ -1,117 +1,56 @@ -import { basename, join, sep } from "path"; -import { readFile, writeFile } from "node:fs/promises"; +import { writeFile } from "node:fs/promises"; +import { relative } from "node:path"; import { kebabCase } from "change-case"; -import { getDefaultTag, getRelatedArmRpcFromDoc } from "./markdown-utils.js"; +import { getRelatedArmRpcFromDoc } from "./markdown-utils.js"; +import { getPathToDependency, getDependencyVersion, relativizePath } from "./util.js"; +import { getViolations } from "./correlateResults.js"; +import { isFailure, isWarning } from "./util.js"; import { AutorestRunResult, - pathExists, - getPathToDependency, - getDependencyVersion, -} from "./util.js"; - -// TODO: Name -export type BeforeAfter = { - // TODO: This is nullable - before: AutorestRunResult | null; - after: AutorestRunResult; -}; - -export type Source = { - document: string; - position: { - line: number; - // TODO: this is misspelled in momentOfTruthUtils.ts. Is this value ever - // properly populated? 
- colomn: number; - }; -}; - -export type LintingResultMessage = { - level: string; - code: string; - message: string; - source: Source[]; - validationCategory?: string; - details: { - jsonpath: (string | number)[]; - validationCategory?: string; - }; -}; - -export type LintDiffViolation = LintingResultMessage & { - groupName?: string; - filePath?: string; - lineNumber?: number; - armRpcs?: string[]; -}; + AutoRestMessage, + BeforeAfter, + LintDiffViolation, +} from "./lintdiff-types.js"; const LIMIT_50_MESSAGE = "Only 50 items are listed, please refer to log for more details."; -export async function generateReport( - beforePath: string, - beforeChecks: AutorestRunResult[], - afterChecks: AutorestRunResult[], +export async function generateLintDiffReport( + runCorrelations: Map, affectedSwaggers: Set, outFile: string, baseBranch: string, compareSha: string, + githubRepoPath: string, ): Promise { + console.log("Generating LintDiff report..."); + let pass = true; - const runCorrelations = await correlateRuns(beforePath, beforeChecks, afterChecks); + let outputMarkdown = ""; // See unifiedPipelineHelper.ts:386 // Compared specs (link to npm package: @microsoft.azure/openapi-validator/v/) const dependencyVersion = await getDependencyVersion( await getPathToDependency("@microsoft.azure/openapi-validator"), ); - let outputMarkdown = `| Compared specs ([v${dependencyVersion}](https://www.npmjs.com/package/@microsoft.azure/openapi-validator/v/${dependencyVersion})) | new version | base version |\n`; + outputMarkdown += `| Compared specs ([v${dependencyVersion}](https://www.npmjs.com/package/@microsoft.azure/openapi-validator/v/${dependencyVersion})) | new version | base version |\n`; outputMarkdown += `| --- | --- | --- |\n`; // Compared Specs | New Version | Base Version // | link: readme.md#tag- | link: readme.md#tag- // ... | ... | ... 
- for (const [_, { before, after }] of runCorrelations.entries()) { + for (const [, { before, after }] of runCorrelations.entries()) { const afterName = getName(after); const beforeName = before ? getName(before) : "default"; const afterPath = getPath(after); const beforePath = before ? getPath(before) : ""; - outputMarkdown += `| ${afterName} | link: [${afterName}](${getFileLink(compareSha, afterPath)}) | link: [${beforeName}](${getFileLink(baseBranch, beforePath)}) |\n`; + outputMarkdown += `| ${afterName} | [${afterName}](${getFileLink(githubRepoPath, compareSha, afterPath)}) | [${beforeName}](${getFileLink(githubRepoPath, baseBranch, beforePath)}) |\n`; } outputMarkdown += `\n\n`; - const newViolations: LintDiffViolation[] = []; - const existingViolations: LintDiffViolation[] = []; - - for (const [runKey, { before, after }] of runCorrelations.entries()) { - const beforeViolations = before - ? getLintDiffViolations(before).filter( - (v) => - (isFailure(v.level) || isWarning(v.level)) && - v.source?.length > 0 && - // TODO: Calculate relativized path outside of loop - affectedSwaggers.has(relativizePath(v.source[0].document).slice(1)), - ) - : []; - const afterViolations = getLintDiffViolations(after).filter( - (v) => - (isFailure(v.level) || isWarning(v.level)) && - v.source?.length > 0 && - // TODO: Calculate relativized path outside of loop - affectedSwaggers.has(relativizePath(v.source[0].document).slice(1)), - ); - - const [newitems, existingItems] = getNewItems(beforeViolations, afterViolations); - console.log(`Correlated Run: ${runKey}`); - console.log(`New violations: ${newitems.length}`); - console.log(`Existing violations: ${existingItems.length}`); - - newViolations.push(...newitems); - existingViolations.push(...existingItems); - } + const [newViolations, existingViolations] = getViolations(runCorrelations, affectedSwaggers); - console.log("Populating armRpcs for new violations"); for (const newItem of newViolations) { // TODO: Potential performance 
issue, make parallel newItem.armRpcs = await getRelatedArmRpcFromDoc(newItem.code); @@ -120,9 +59,8 @@ export async function generateReport( newViolations.sort(compareLintDiffViolations); existingViolations.sort(compareLintDiffViolations); + console.log(`New violations: ${newViolations.length}`); if (newViolations.length > 0) { - // New violations fail the build - pass = false; outputMarkdown += "**[must fix]The following errors/warnings are intorduced by current PR:**\n"; if (newViolations.length > 50) { outputMarkdown += `${LIMIT_50_MESSAGE}\n`; @@ -133,15 +71,24 @@ export async function generateReport( outputMarkdown += "| ---- | ------- | ------------------------------- |\n"; for (const violation of newViolations.slice(0, 50)) { - const { level, code, message } = violation; - outputMarkdown += `| ${iconFor(level)} [${code}](${getDocUrl(code)}) | ${message}
Location: [${getPathSegment(relativizePath(getFile(violation)))}#L${getLine(violation)}](${getFileLink(compareSha, relativizePath(getFile(violation)), getLine(violation))}) | ${violation.armRpcs?.join(", ")} |\n`; + outputMarkdown += getNewViolationReportRow(violation, githubRepoPath, compareSha); + } + + if (newViolations.some((v) => isFailure(v.level))) { + console.log("\t❌ At least one violation has error or fatal level. LintDiff will fail."); + // New violations with level error or fatal fail the build. If all new + // violations are warnings, the build passes. + pass = false; + } else { + console.log("\t✅ No new violations with error or fatal level. LintDiff will pass."); } + LogViolations("New violations list", newViolations); + outputMarkdown += "\n"; } - // The following errors/warnings exist before current PR submission - // Rule | Message | Location (link to file, line # at SHA) + console.log(`Existing violations: ${existingViolations.length}`); if (existingViolations.length > 0) { outputMarkdown += "**The following errors/warnings exist before current PR submission:**\n"; if (existingViolations.length > 50) { @@ -153,9 +100,11 @@ export async function generateReport( for (const violation of existingViolations.slice(0, 50)) { const { level, code, message } = violation; - outputMarkdown += `| ${iconFor(level)} [${code}](${getDocUrl(code)}) | ${message}
Location: [${getPathSegment(relativizePath(getFile(violation)))}#L${getLine(violation)}](${getFileLink(compareSha, relativizePath(getFile(violation)), getLine(violation))}) |\n`; + outputMarkdown += `| ${iconFor(level)} [${code}](${getDocUrl(code)}) | ${message}
Location: [${getPathSegment(relativizePath(getFile(violation)))}#L${getLine(violation)}](${getFileLink(githubRepoPath, compareSha, relativizePath(getFile(violation)), getLine(violation))}) |\n`; } + LogViolations("Existing violations list", existingViolations); + outputMarkdown += `\n`; } @@ -165,76 +114,49 @@ export async function generateReport( return pass; } -export async function correlateRuns( - beforePath: string, - beforeChecks: AutorestRunResult[], - afterChecks: AutorestRunResult[], -): Promise> { - const runCorrelations = new Map(); - for (const results of afterChecks) { - const { readme, tag } = results; - const key = tag ? `${readme}#${tag}` : readme; - if (runCorrelations.has(key)) { - throw new Error(`Duplicate key found correlating autorest runs: ${key}`); - } - - // Look for candidates matching readme and tag - const beforeCandidates = beforeChecks.filter((r) => r.readme === readme && r.tag === tag); - if (beforeCandidates.length === 1) { - runCorrelations.set(key, { - before: beforeCandidates[0], - after: results, - }); - continue; - } else if (beforeCandidates.length > 1) { - throw new Error(`Multiple before candidates found for key ${key}`); - } +function LogViolations(heading: string, violations: LintDiffViolation[]) { + console.log(`::group::${heading}`); + for (const violation of violations) { + const source = getFile(violation); + const line = getLine(violation); + console.log(`Violation: ${source}${line ? 
`:${line}` : ""}`); + console.log(` Level: ${violation.level}`); + console.log(` Code: ${violation.code}`); + console.log(` Message: ${violation.message}`); + } + console.log("::endgroup::"); +} - // Look for candidates with a matching default tag from the baseline - const beforeReadmePath = join(beforePath, readme); - if (await pathExists(beforeReadmePath)) { - const readmeContent = await readFile(beforeReadmePath, { encoding: "utf-8" }); - const defaultTag = getDefaultTag(readmeContent); - if (!defaultTag) { - throw new Error(`No default tag found for readme ${readme} in before state`); - } - const beforeDefaultTagCandidates = beforeChecks.filter( - (r) => r.readme === readme && r.tag === defaultTag, - ); - - if (beforeDefaultTagCandidates.length === 1) { - runCorrelations.set(key, { - before: beforeDefaultTagCandidates[0], - after: results, - }); - continue; - } else if (beforeDefaultTagCandidates.length > 1) { - throw new Error( - `Multiple before candidates found for key ${key} using default tag ${defaultTag}`, - ); - } - - // Look for candidates matching just the readme file - const beforeReadmeCandidate = beforeChecks.filter((r) => r.readme === readme); - if (beforeReadmeCandidate.length === 1) { - runCorrelations.set(key, { - before: beforeReadmeCandidate[0], - after: results, - }); - continue; - } else if (beforeReadmeCandidate.length > 1) { - throw new Error(`Multiple before candidates found for key ${key} using readme ${readme}`); - } +export async function generateAutoRestErrorReport( + autoRestErrors: { result: AutorestRunResult; errors: AutoRestMessage[] }[], + outFile: string, +) { + let outputMarkdown = ""; + + console.error("LintDiff detected AutoRest errors"); + outputMarkdown += "**AutoRest errors:**\n\n"; + for (const { result, errors } of autoRestErrors) { + console.log(`AutoRest errors for ${result.readme} (${result.tag})`); + + const readmePath = relative(result.rootPath, result.readme.path); + + outputMarkdown += `Readme: 
\`${readmePath}\`\n`; + outputMarkdown += `Tag: \`${result.tag}\`\n`; + outputMarkdown += "Errors:\n"; + outputMarkdown += "| Level | Message |\n"; + outputMarkdown += "| ----- | ------- |\n"; + for (const error of errors) { + const { level, message } = error; + console.log(` ${level}: ${message}`); + + outputMarkdown += `| ${iconFor(level)} ${level} | ${message.replace(/\n/g, "
")} |\n`; } - console.log(`No before candidates found for key ${key}, using no baseline`); - runCorrelations.set(key, { - before: null, - after: results, - }); + outputMarkdown += "\n\n"; } - return runCorrelations; + console.log(`Writing output to ${outFile}`); + await writeFile(outFile, outputMarkdown); } /** @@ -249,6 +171,10 @@ export function compareLintDiffViolations(a: LintDiffViolation, b: LintDiffViola return -1; } else if (isWarning(a.level) && isFailure(b.level)) { return 1; + } else if (a.level === "fatal" && b.level !== "fatal") { + return -1; + } else if (a.level !== "fatal" && b.level === "fatal") { + return 1; } const fileA = getFile(a) || ""; @@ -283,16 +209,21 @@ export function getPathSegment(path: string): string { return path.split("/").slice(-4).join("/"); } -export function getFileLink(sha: string, path: string, line: number | null = null) { +export function getFileLink( + repoPath: string, + sha: string, + path: string, + line: number | null = null, +) { // Paths can sometimes contain a preceeding slash if coming from a nomralized // filesystem path. In this case, remove it so the link doesn't contain two // forward slashes. const urlPath = path.startsWith("/") ? path.slice(1) : path; if (line === null) { - return `https://github.com/Azure/azure-rest-api-specs/blob/${sha}/${urlPath}`; + return `https://github.com/${repoPath}/blob/${sha}/${urlPath}`; } - return `https://github.com/Azure/azure-rest-api-specs/blob/${sha}/${urlPath}#L${line}`; + return `https://github.com/${repoPath}/blob/${sha}/${urlPath}#L${line}`; } export function getDocUrl(id: string) { @@ -303,21 +234,6 @@ export function getDocUrl(id: string) { return `https://github.com/Azure/azure-openapi-validator/blob/main/docs/${kebabCase(id)}.md`; } -/** - * Normalize a path to be relative to a given directory. 
- * @param path File path with separators from the current system - * @param from A directory name to treat as the root (e.g. /specification/) - */ -// TODO: Review use of sep -export function relativizePath(path: string, from: string = `${sep}specification${sep}`): string { - const indexOfBy = path.lastIndexOf(from); - if (indexOfBy === -1) { - return path; - } - - return path.substring(indexOfBy); -} - export function getFile(lintDiffViolation: LintDiffViolation): string { return lintDiffViolation.source?.[0]?.document || ""; } @@ -331,112 +247,26 @@ export function getLine(lintDiffViolation: LintDiffViolation): number | undefine return undefined; } -export function iconFor(type: string) { - if (type.toLowerCase().includes("error")) { - return ":x:"; - } else { - return ":warning:"; - } -} - -// Ensure that diagnostic information emitted by azure-openapi-validator -// to stdout is not interpreted as a diff occurrence. -// Assume that all messages emitted as "information" or "debug" or "verbose" are -// diagnostic output and hence can be excluded, and that all diagnostic output -// is emitted as "information" or "debug" or "verbose", so we don't have to -// exclude anything beyond these levels. 
-const diagnosticLevelStrings: string[] = [ - '"level":"information"', - '"level":"debug"', - '"level":"verbose"', -]; - -export function getLintDiffViolations(runResult: AutorestRunResult): LintDiffViolation[] { - const violations: LintDiffViolation[] = []; - - for (const line of (runResult.stderr + runResult.stdout).split("\n")) { - // TODO: Check for fatal autorest run errors and surface those - - if (!line.includes('"extensionName":"@microsoft.azure/openapi-validator"')) { - continue; - } - - // Ignore all lines that are "diagnostic" output from autorest - if (diagnosticLevelStrings.some((level) => line.includes(level))) { - continue; - } - - const result = JSON.parse(line.trim()); - if (result.code == undefined) { - // Here we are assuming that lack of code denotes fatal error. For example [1]. - // We must set this to some value because if it would stay undefined, the downstream code would blow up in several places. - result.code = "FATAL"; - } - violations.push(result as LintDiffViolation); - } - - return violations; -} - -export function isFailure(level: string) { - return ["error", "fatal"].includes(level.toLowerCase()); -} - -export function isWarning(level: string) { - return level.toLowerCase() === "warning"; -} - -// This logic is duplicated from momentOfTruthPostProcessing.ts:140 -export function getNewItems( - before: LintDiffViolation[], - after: LintDiffViolation[], -): [LintDiffViolation[], LintDiffViolation[]] { - const newItems = []; - const existingItems = []; - - for (const afterViolation of after) { - let errorIsNew = true; - - // Always treat fatal errors as new - if (afterViolation.level.toLowerCase() === "fatal") { - newItems.push(afterViolation); - continue; - } - - // Search through "before" to find a matching violation - for (const beforeViolation of before) { - if ( - beforeViolation.level == afterViolation.level && - beforeViolation.code == afterViolation.code && - beforeViolation.message == afterViolation.message && - 
beforeViolation.source?.length == afterViolation.source?.length && - beforeViolation.source?.length && - afterViolation.source?.length && - isSameSources(beforeViolation.source, afterViolation.source) && - arrayIsEqual(beforeViolation.details?.jsonpath, afterViolation.details?.jsonpath) - ) { - errorIsNew = false; - existingItems.push(afterViolation); - - // Only need to find one match - break; - } - } - - // If no match is found, add to new - if (errorIsNew) { - newItems.push(afterViolation); - } +function getNewViolationReportRow( + violation: LintDiffViolation, + githubRepoPath: string, + compareSha: string, +): string { + const { level, code, message } = violation; + if (level.toLowerCase() == "fatal") { + // Fatal errors have fewer details and don't need to be formatted + return `| ${iconFor(level)} ${code} | ${message} | ${violation.armRpcs?.join(", ")} |\n`; } - return [newItems, existingItems]; + return `| ${iconFor(level)} [${code}](${getDocUrl(code)}) | ${message}
Location: [${getPathSegment(relativizePath(getFile(violation)))}#L${getLine(violation)}](${getFileLink(githubRepoPath, compareSha, relativizePath(getFile(violation)), getLine(violation))}) | ${violation.armRpcs?.join(", ")} |\n`; } -export function isSameSources(a: Source[], b: Source[]) { - if (a?.length && b?.length) { - return basename(a?.[0]?.document) === basename(b?.[0]?.document); +export function iconFor(type: string) { + if (type.toLowerCase().includes("error") || type.toLowerCase() === "fatal") { + return ":x:"; + } else { + return ":warning:"; } - return true; } export function getName(result: AutorestRunResult) { @@ -444,18 +274,7 @@ export function getName(result: AutorestRunResult) { } export function getPath(result: AutorestRunResult) { - const { readme, tag } = result; - return tag ? `${readme}#tag-${tag}` : readme; -} - -export function arrayIsEqual(a: any[], b: any[]) { - if (a.length !== b.length) { - return false; - } - for (let i = 0; i < a.length; i++) { - if (a[i] !== b[i]) { - return false; - } - } - return true; + const { rootPath, readme, tag } = result; + const readmePathRelative = relative(rootPath, readme.path); + return tag ? 
`${readmePathRelative}#tag-${tag}` : readmePathRelative; } diff --git a/eng/tools/lint-diff/src/lint-diff.ts b/eng/tools/lint-diff/src/lint-diff.ts index 30ad35343288..a597dc21eff1 100644 --- a/eng/tools/lint-diff/src/lint-diff.ts +++ b/eng/tools/lint-diff/src/lint-diff.ts @@ -1,8 +1,11 @@ import { parseArgs, ParseArgsConfig } from "node:util"; -import { pathExists } from "./util.js"; +import { pathExists, getDependencyVersion, getPathToDependency } from "./util.js"; import { getRunList } from "./processChanges.js"; -import { runChecks } from "./runChecks.js"; -import { generateReport } from "./generateReport.js"; +import { runChecks, getAutorestErrors } from "./runChecks.js"; +import { correlateRuns } from "./correlateResults.js"; +import { generateAutoRestErrorReport, generateLintDiffReport } from "./generateReport.js"; +import { writeFile } from "node:fs/promises"; +import { SpecModelError } from "@azure-tools/specs-shared/spec-model-error"; function usage() { console.log("TODO: Write up usage"); @@ -40,6 +43,11 @@ export async function main() { short: "m", default: "main", }, + "github-repo-path": { + type: "string", + short: "r", + default: process.env.GITHUB_REPOSITORY || "Azure/azure-rest-api-specs", + }, }, strict: true, }; @@ -52,6 +60,7 @@ export async function main() { "out-file": outFile, "base-branch": baseBranch, "compare-sha": compareSha, + "github-repo-path": githubRepoPath, }, } = parseArgs(config); @@ -77,14 +86,10 @@ export async function main() { process.exit(1); } - // const versionResult = await executeCommand("npm exec -- autorest --version"); - // if (versionResult.error) { - // console.error("Error running autorest --version", versionResult.error); - // process.exit(1); - // } - - // console.log("Autorest version:"); - // console.log(versionResult.stdout); + const validatorVersion = await getDependencyVersion( + await getPathToDependency("@microsoft.azure/openapi-validator"), + ); + console.log(`Using @microsoft.azure/openapi-validator 
version: ${validatorVersion}\n`); await runLintDiff( beforeArg as string, @@ -93,6 +98,7 @@ export async function main() { outFile as string, baseBranch as string, compareSha as string, + githubRepoPath as string, ); } @@ -103,30 +109,80 @@ async function runLintDiff( outFile: string, baseBranch: string, compareSha: string, + githubRepoPath: string, ) { - const [beforeList, afterList, affectedSwaggers] = await getRunList( - beforePath, - afterPath, - changedFilesPath, - ); + let beforeList, afterList, affectedSwaggers; + try { + [beforeList, afterList, affectedSwaggers] = await getRunList( + beforePath, + afterPath, + changedFilesPath, + ); + } catch (error) { + if (error instanceof SpecModelError) { + console.log("\n\n"); + console.log("❌ Error building Spec Model from changed file list:"); + console.log(`${error}`); + console.log("Ensure input files and references are valid."); + + process.exitCode = 1; + return; + } + + throw error; + } + + if (beforeList.size === 0 && afterList.size === 0) { + await writeFile(outFile, "No changes found. Exiting."); + console.log("No changes found. Exiting."); + return; + } + + if (afterList.size === 0) { + await writeFile(outFile, "No applicable files found in after. Exiting."); + console.log("No applicable files found in after. Exiting."); + return; + } // It may be possible to run these in parallel as they're running against // different directories. const beforeChecks = await runChecks(beforePath, beforeList); const afterChecks = await runChecks(afterPath, afterList); - const pass = await generateReport( - beforePath, - beforeChecks, - afterChecks, + // If afterChecks has AutoRest errors, fail the run. + const autoRestErrors = afterChecks + .map((result) => { + return { result, errors: getAutorestErrors(result) }; + }) + .filter((result) => result.errors.length > 0); + if (autoRestErrors.length > 0) { + generateAutoRestErrorReport(autoRestErrors, outFile); + console.log("AutoRest errors found. 
See workflow summary for details."); + + process.exitCode = 1; + console.error(`AutoRest errors found. See workflow summary report in ${outFile} for details.`); + return; + } + + const runCorrelations = await correlateRuns(beforePath, beforeChecks, afterChecks); + + const pass = await generateLintDiffReport( + runCorrelations, affectedSwaggers, outFile, baseBranch, compareSha, + githubRepoPath, ); - if (!pass) { + if (!pass) { process.exitCode = 1; console.error(`Lint-diff failed. See workflow summary report in ${outFile} for details.`); } + + if (process.env.GITHUB_SERVER_URL && process.env.GITHUB_REPOSITORY && process.env.GITHUB_RUN_ID) { + console.log( + `See workflow summary at: ${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`, + ); + } } diff --git a/eng/tools/lint-diff/src/lintdiff-types.ts b/eng/tools/lint-diff/src/lintdiff-types.ts new file mode 100644 index 000000000000..6515cbe3c760 --- /dev/null +++ b/eng/tools/lint-diff/src/lintdiff-types.ts @@ -0,0 +1,63 @@ +import { Readme } from "@azure-tools/specs-shared/readme"; +import { ExecException } from "node:child_process"; + +// TODO: Reduce to minimal set of properties +export type AutorestRunResult = { + rootPath: string; + readme: Readme; + tag: string; + + error: ExecException | null; + stdout: string; + stderr: string; +}; + +export interface AutoRestMessage { + level: "information" | "warning" | "error" | "debug" | "verbose" | "fatal"; + code?: any; + message: string; + readme?: string; + tag?: string; + groupName?: string; +} + +// TODO: Name +export type BeforeAfter = { + // TODO: This is nullable + before: AutorestRunResult | null; + after: AutorestRunResult; +}; + +export type Source = { + document: string; + position: { + line: number; + // TODO: this is misspelled in momentOfTruthUtils.ts. Is this value ever + // properly populated? 
+ colomn: number; + }; +}; + +export type LintingResultMessage = { + level: string; + code: string; + message: string; + source: Source[]; + validationCategory?: string; + details: { + jsonpath: (string | number)[]; + validationCategory?: string; + }; +}; + +export type LintDiffViolation = LintingResultMessage & { + groupName?: string; + filePath?: string; + lineNumber?: number; + armRpcs?: string[]; +}; + +export type ReadmeAffectedTags = { + readme: Readme; + changedTags: Set; +}; diff --git a/eng/tools/lint-diff/src/markdown-utils.ts b/eng/tools/lint-diff/src/markdown-utils.ts index 4acde2720575..20d2a5e54d35 100644 --- a/eng/tools/lint-diff/src/markdown-utils.ts +++ b/eng/tools/lint-diff/src/markdown-utils.ts @@ -1,13 +1,7 @@ -import * as commonmark from "commonmark"; -import { readFile } from "fs/promises"; - -// TODO: Can this be eliminated? -import { parseMarkdown } from "@azure-tools/openapi-tools-common"; -// TODO: Can this be eliminated? -import * as amd from "@azure/openapi-markdown"; +import { marked } from "marked"; import { kebabCase } from "change-case"; import axios from "axios"; -import * as YAML from "js-yaml"; +import { Readme } from "@azure-tools/specs-shared/readme"; export enum MarkdownType { Arm = "arm", @@ -17,39 +11,24 @@ export enum MarkdownType { /** * - * @param markdownFile Path to the markdown file to parse + * @param readme Readme object to extract * @returns {Promise} The type of OpenAPI spec in the markdown file, or "default" if not found */ -export async function getOpenapiType(markdownFile: string): Promise { - let markdownContent = await readFile(markdownFile, { encoding: "utf-8" }); - for (const codeBlock of parseCodeblocks(markdownContent)) { - if ( - !codeBlock.info || - codeBlock.info.trim().toLocaleLowerCase() !== "yaml" || - !codeBlock.literal - ) { - continue; - } - - let lines = codeBlock.literal.trim().split("\n"); - - for (const line of lines) { - if (line.trim().startsWith("openapi-type:")) { - let openapiType = 
line.trim().split(":")[1].trim().toLowerCase(); - - if (Object.values(MarkdownType).includes(openapiType as MarkdownType)) { - return openapiType as MarkdownType; - } - } - } +// TODO: Should this be placed in the Readme class? +export async function getOpenapiType(readme: Readme): Promise { + const openapiType = ((await readme.getGlobalConfig()) as { "openapi-type"?: string })[ + "openapi-type" + ]; + if (openapiType && Object.values(MarkdownType).includes(openapiType as MarkdownType)) { + return openapiType as MarkdownType; } // Fallback, no openapi-type found in the file. Look at path to determine type // resource-manager: Arm // data-plane: DataPlane - if (markdownFile.match(/.*specification\/.*\/resource-manager\/.*readme.md$/g)) { + if (readme.path.match(/.*specification\/.*\/resource-manager\/.*readme.md$/g)) { return MarkdownType.Arm; - } else if (markdownFile.match(/.*specification\/.*\/data-plane\/.*readme.md$/g)) { + } else if (readme.path.match(/.*specification\/.*\/data-plane\/.*readme.md$/g)) { return MarkdownType.DataPlane; } @@ -57,51 +36,8 @@ export async function getOpenapiType(markdownFile: string): Promise { - const parsed = parseCommonmark(markdown); - const walker = parsed.walker(); - let event; - while ((event = walker.next())) { - const node = event.node; - if (event.entering && node.type === "code_block") { - yield node; - } - } -} - -// TODO: Direct copy/paste from utils, factor appropriately -function parseCommonmark(markdown: string): commonmark.Node { - return new commonmark.Parser().parse(markdown); -} - -/** - * Returns all tags from the given readme document - * @param readMeContent - * @returns - */ -export function getAllTags(readMeContent: string): string[] { - const cmd = parseMarkdown(readMeContent); - const allTags = new amd.ReadMeManipulator( - { error: (_msg: string) => {} }, - new amd.ReadMeBuilder(), - ).getAllTags(cmd); - return [...allTags]; -} - type TagInputFile = { tagName: string; inputFiles: readonly string[] }; 
-export function getTagsAndInputFiles(tags: string[], readmeContent: string): TagInputFile[] { - const tagResults: TagInputFile[] = []; - for (const tag of tags) { - const inputFiles = getInputFiles(readmeContent, tag); - if (inputFiles.length > 0) { - tagResults.push({ tagName: tag, inputFiles }); - } - } - return tagResults; -} - /** * Given a list of tags and the content of a readme file, remove tags that are * subsets of other tags (reduces number of times autorest is called). @@ -125,12 +61,6 @@ export function deduplicateTags(tagInfo: TagInputFile[]) { .map((tag) => tag.tagName); } -export function getInputFiles(readMeContent: string, tag: string): readonly string[] { - const cmd = parseMarkdown(readMeContent); - const inputFiles = amd.getInputFilesForTag(cmd.markDown, tag); - return inputFiles || []; -} - export function getDocRawUrl(code: string) { if (code == "FATAL") { return `N/A`; @@ -143,7 +73,6 @@ export function getDocRawUrl(code: string) { const rpcInfoCache = new Map(); -// TODO: Tests export async function getRelatedArmRpcFromDoc(ruleName: string): Promise { if (ruleName == "FATAL") { return []; @@ -164,58 +93,34 @@ export async function getRelatedArmRpcFromDoc(ruleName: string): Promise= 1 && + token.text.trim().toLowerCase() === "related arm guideline code" ) { - const next = node.next; - if (next?.type == "list") { - let currentItem = next.firstChild; - while (currentItem) { - const code = currentItem?.firstChild?.firstChild?.literal; - if (code) { - rpcRules.push(...code.split(",").map((c) => c.trim())); + // The next token should be a list + const next = tokens[i + 1]; + if (next && next.type === "list" && Array.isArray(next.items)) { + for (const item of next.items) { + // item.text may contain comma-separated codes + if (typeof item.text === "string") { + rpcRules.push(...item.text.split(",").map((c: string) => c.trim())); } - currentItem = currentItem.next; } } break; } } + rpcInfoCache.set(ruleName, rpcRules); return rpcRules; } -export 
function getDefaultTag(markdownContent: string): string { - const parsed = parseMarkdown(markdownContent); - const startNode = parsed.markDown; - const codeBlockMap = amd.getCodeBlocksAndHeadings(startNode); - - const latestHeader = "Basic Information"; - - const lh = codeBlockMap[latestHeader]; - if (lh) { - const latestDefinition = YAML.load(lh.literal!) as undefined | { tag: string }; - if (latestDefinition) { - return latestDefinition.tag; - } - } else { - for (let idx of Object.keys(codeBlockMap)) { - const lh = codeBlockMap[idx]; - if (!lh || !lh.info || lh.info.trim().toLocaleLowerCase() !== "yaml") { - continue; - } - const latestDefinition = YAML.load(lh.literal!) as undefined | { tag: string }; - - if (latestDefinition) { - return latestDefinition.tag; - } - } - } - return ""; +export async function getDefaultTag(readme: Readme): Promise { + const tag = ((await readme.getGlobalConfig()) as { tag?: string }).tag; + return tag ? tag : ""; } diff --git a/eng/tools/lint-diff/src/processChanges.ts b/eng/tools/lint-diff/src/processChanges.ts index 035ecc9dd9bc..c57bec9648b2 100644 --- a/eng/tools/lint-diff/src/processChanges.ts +++ b/eng/tools/lint-diff/src/processChanges.ts @@ -1,21 +1,19 @@ -import { join, dirname, sep } from "path"; -import { readFile, readdir } from "fs/promises"; +import { join, relative, resolve, sep } from "path"; +import { readFile } from "fs/promises"; import { pathExists } from "./util.js"; -import { specification } from "../../../../.github/src/changed-files.js"; - -import { - getAllTags, - getInputFiles, - getTagsAndInputFiles, - deduplicateTags, -} from "./markdown-utils.js"; +import { specification, readme, swagger } from "@azure-tools/specs-shared/changed-files"; +import { SpecModel } from "@azure-tools/specs-shared/spec-model"; +import { ReadmeAffectedTags } from "./lintdiff-types.js"; +import deepEqual from "deep-eql"; + +import { deduplicateTags } from "./markdown-utils.js"; import $RefParser from 
"@apidevtools/json-schema-ref-parser"; export async function getRunList( beforePath: string, afterPath: string, changedFilesPath: string, -): Promise<[Map, Map, Set]> { +): Promise<[Map, Map, Set]> { // Forward slashes are OK list coming from changedFilesPath is from git which // always uses forward slashes as path separators @@ -38,20 +36,45 @@ export async function getRunList( // In the future, the loop involving [beforePath, afterPath] can be eliminated // as well as beforeState - const [beforeState, _] = await buildState(changedSpecFiles, beforePath); + const [beforeState] = await buildState(changedSpecFiles, beforePath); const [afterState, afterSwaggers] = await buildState(changedSpecFiles, afterPath); - const affectedSwaggers = new Set(afterSwaggers); - - console.log(`affected swaggers: ${[...affectedSwaggers].join(", ")}`); + const affectedSwaggerCandidates = new Set(afterSwaggers); const [beforeTagMap, afterTagMap] = reconcileChangedFilesAndTags(beforeState, afterState); + const affectedSwaggers = await getChangedSwaggers( + beforePath, + afterPath, + affectedSwaggerCandidates, + ); + + console.log("Before readme and tags:"); + console.table( + [...beforeTagMap].map(([readme, tags]) => ({ readme, tags: [...tags.changedTags] })), + ["readme", "tags"], + ); + console.log("\n"); + + console.log("After readme and tags:"); + console.table( + [...afterTagMap].map(([readme, tags]) => ({ readme, tags: [...tags.changedTags] })), + ["readme", "tags"], + ); + console.log("\n"); + + console.log("Affected swaggers:"); + console.table( + [...affectedSwaggers].map((swagger) => ({ swagger })), + ["swagger"], + ); + console.log("\n"); + return [beforeTagMap, afterTagMap, affectedSwaggers]; } export async function buildState( changedSpecFiles: string[], rootPath: string, -): Promise<[Map, string[]]> { +): Promise<[Map, string[]]> { // Filter changed files to include only those that exist in the rootPath const existingChangedFiles = []; for (const file of changedSpecFiles) { 
@@ -64,78 +87,88 @@ export async function buildState( // e.g. specification/service1/readme.md -> specification/service1 const affectedServiceDirectories = await getAffectedServices(existingChangedFiles); - // Get a map of a service's swagger files and their dependencies - // TODO: Use set or array? - const affectedSwaggerMap = new Map(); + // Build service models of affected services + const specModels = new Map(); for (const serviceDir of affectedServiceDirectories) { - const changedServiceFiles = existingChangedFiles.filter((file) => file.startsWith(serviceDir)); - const serviceDependencyMap = await getSwaggerDependenciesMap(rootPath, serviceDir); - - affectedSwaggerMap.set( - serviceDir, - await getAffectedSwaggers(changedServiceFiles, serviceDependencyMap), - ); + const specModel = new SpecModel(resolve(rootPath, serviceDir)); + specModels.set(serviceDir, specModel); } - // Use changedSpecFiles (which might not be present in the branch) to get - // a set of affected readme files. - const affectedReadmes = await getAffectedReadmes(changedSpecFiles, rootPath); - - const readmeTags = new Map>(); - for (const readme of affectedReadmes) { - const readmeService = await getService(readme); - if (!affectedSwaggerMap.has(readmeService)) { - continue; - } - - // TODO: The parser is used twice to get tags and input files. This can be - // made more efficient. 
- const readmeContent = await readFile(join(rootPath, readme), { encoding: "utf-8" }); - for (const tag of getAllTags(readmeContent)) { - const inputFiles = getInputFiles(readmeContent, tag).map((file) => - join(dirname(readme), file), - ); - if (inputFiles === undefined || inputFiles.length === 0) { - continue; - } - - // Readme + Tag Combo + // Build a map of readme.md files and tags affected by the changed files + const readmeTags = new Map(); + for (const changedSwagger of existingChangedFiles.filter(swagger)) { + const specModel = specModels.get(getService(changedSwagger))!; + const affectedReadmes = await specModel.getAffectedReadmeTags( + resolve(rootPath, changedSwagger), + ); - // TODO: ensure ! is correct to do here - for (const swagger of affectedSwaggerMap.get(readmeService)!) { - if (inputFiles.includes(swagger)) { - if (!readmeTags.has(readme)) { - readmeTags.set(readme, new Set()); - } - readmeTags.get(readme)?.add(tag); - } + for (const [readmePath, tags] of affectedReadmes) { + const affectedTags = readmeTags.get(readmePath) ?? { + readme: (await specModel.getReadmes()).get(readmePath)!, + changedTags: new Set(), + }; + for (const [tagName] of tags) { + affectedTags.changedTags.add(tagName); } + readmeTags.set(readmePath, affectedTags); } } - // TODO: Deduplicate inside or outside state building? 
It's possible that - // later processing like that in reconcileChangedFilesAndTags - const changedFileAndTagsMap = new Map(); - for (const [readme, tags] of readmeTags.entries()) { - const dedupedTags = deduplicateTags( - await getTagsAndInputFiles( - [...tags], - await readFile(join(rootPath, readme), { encoding: "utf-8" }), - ), - ); + // Deduplicate tags in readme files + const changedFileAndTagsMap = new Map(); + for (const [readmeFile, tags] of readmeTags.entries()) { + const tagMap = await tags.readme.getTags(); + const tagsAndInputFiles = [...tags.changedTags].map((changedTag) => { + return { + tagName: changedTag, + inputFiles: [...tagMap.get(changedTag)!.inputFiles.keys()], + }; + }); - changedFileAndTagsMap.set(readme, dedupedTags); + const dedupedTags = deduplicateTags(tagsAndInputFiles); + changedFileAndTagsMap.set(relative(rootPath, readmeFile), { + readme: tags.readme, + changedTags: new Set(dedupedTags), + }); } // For readme files that have changed but there are no affected swaggers, // add them to the map with no tags - for (const changedReadme of affectedReadmes) { + for (const changedReadme of existingChangedFiles.filter(readme)) { + const readmePath = resolve(rootPath, changedReadme); + + // Skip readme.md files that don't have "input-file:" as autorest cannot + // scan them. 
+ const readmeContent = await readFile(readmePath, { encoding: "utf-8" }); + if (!readmeContent.includes("input-file:")) { + continue; + } + + const service = specModels.get(getService(changedReadme))!; + const readmes = await service.getReadmes(); + const readmeObject = readmes.get(readmePath)!; + if (!changedFileAndTagsMap.has(changedReadme)) { - changedFileAndTagsMap.set(changedReadme, []); + changedFileAndTagsMap.set(changedReadme, { + readme: readmeObject, + changedTags: new Set(), + }); + } + } + + const affectedSwaggers = new Set(); + for (const changedSwagger of existingChangedFiles.filter(swagger)) { + const service = getService(changedSwagger); + const swaggerSet = await specModels + .get(service)! + .getAffectedSwaggers(resolve(rootPath, changedSwagger)); + for (const swaggerPath of swaggerSet.keys()) { + affectedSwaggers.add(relative(rootPath, swaggerPath)); } } - return [changedFileAndTagsMap, Array.from(affectedSwaggerMap.values()).flat()]; + // Return list of affected swagger files + return [changedFileAndTagsMap, [...affectedSwaggers]]; } /** @@ -146,18 +179,18 @@ export async function buildState( * @returns maps of readme files and tags to scan */ export function reconcileChangedFilesAndTags( - before: Map, - after: Map, -): Map[] { - const beforeFinal = new Map(); - const afterFinal = new Map(); + before: Map, + after: Map, +): Map[] { + const beforeFinal = new Map(); + const afterFinal = new Map(); // Clone the maps so that changes to maps do not affect original object for (const [readme, tags] of before.entries()) { - beforeFinal.set(readme, [...tags]); + beforeFinal.set(readme, tags); } for (const [readme, tags] of after.entries()) { - afterFinal.set(readme, [...tags]); + afterFinal.set(readme, tags); } // If a tag is deleted in after and exists in before, do NOT scan the tag @@ -168,11 +201,11 @@ export function reconcileChangedFilesAndTags( continue; } - const afterTags = new Set(afterFinal.get(readme)!); - beforeFinal.set( - readme, - 
tags.filter((tag) => afterTags.has(tag)), - ); + const afterTags = new Set([...afterFinal.get(readme)!.changedTags]); + beforeFinal.set(readme, { + readme: tags.readme, + changedTags: new Set([...tags.changedTags].filter((t) => afterTags.has(t))), + }); } return [beforeFinal, afterFinal]; @@ -214,121 +247,6 @@ export async function getAffectedServices(changedFiles: string[]) { return affectedServices; } -/** - * Build a list of swagger dependencies for a given directory. Only list - * dependencies that are in the same "directory". - * - * @param rootPath The root path of the repo - * @param directory The directory (generally, service directory) to search - * @returns A map of swagger files to the files upon which they depend - */ -export async function getSwaggerDependenciesMap( - rootPath: string, - directory: string, -): Promise>>> { - const swaggerFiles = await enumerateFiles(rootPath, directory, ".json"); - const swaggerDependencies = new Map>(); - const rootAndDirectoryPath = join(rootPath, directory); - - for (const file of swaggerFiles) { - let parsedRefs = await $RefParser.resolve(join(rootPath, file), { - resolve: { http: false }, - }); - // TODO: filter should exclude URLs - const refs = parsedRefs - .paths() - .filter( - (ref: string) => - ref.startsWith(rootAndDirectoryPath) && // Inside the target directory - !ref.includes(`/examples/`), // Exclude examples - ) - // TODO: +1 requires proper handling of trailing slashes - .map((ref: string) => ref.substring(rootPath.length + 1)) // Relative to rootPath - .filter((ref: string) => ref !== file); // Exclude self-reference - - swaggerDependencies.set(file, new Set(refs)); - } - - return swaggerDependencies; -} - -/** - * Given a list of changed files and a map of swagger dependencies, return a - * list of affected swagger files that depend on the given set of changed files. 
- * @param changedFiles - * @param dependencies a map of swagger files to the files upon which those swaggers depend - * @returns - */ -export function getAffectedSwaggers( - changedFiles: string[], - dependencies: Map>, -): string[] { - const affectedSwaggers = new Set(changedFiles); - - for (const changedFile of changedFiles) { - for (const [file, deps] of dependencies.entries()) { - if (affectedSwaggers.has(file)) { - continue; - } - - if (deps.has(changedFile)) { - affectedSwaggers.add(file); - } - } - } - - return [...affectedSwaggers]; -} - -/** - * Get the list of readme files that are affected by the changed files by - * searching for readme files in each higher directory up to the "specification/" - * directory. - * - * @param changedFiles List of changed files. - */ -export async function getAffectedReadmes( - changedFiles: string[], - repoRoot: string, -): Promise { - // OK to use / because changedFiles comes from git which always uses / - const changedFilesInSpecDir = changedFiles.filter((file) => file.startsWith(`specification/`)); - - const changedReadmeFiles = []; - for (const file of changedFilesInSpecDir) { - if (file.toLowerCase().endsWith(`/readme.md`) && (await pathExists(join(repoRoot, file)))) { - changedReadmeFiles.push(file); - } - } - - const changedSpecFiles = changedFilesInSpecDir.filter((f) => - [".md", ".json", ".yaml", ".yml"].some((p) => f.toLowerCase().endsWith(p)), - ); - - const readmeFiles = new Set(changedReadmeFiles); - const visitedFolders = new Set(); - - // For each changed spec file, walk up the directory tree collecting readme - // files until reaching the "specification/" directory (already filtered in - // changedFilesInSpecDir) - for (const specFile of changedSpecFiles) { - let dir = dirname(specFile); - // Exclude '.' 
- as it is outside the specification folder for purposes of - // this function (avoid including root readme.md file) - while (!visitedFolders.has(dir) && dir !== "specification") { - visitedFolders.add(dir); - // TODO: Case sensitivity?? - const readmeFile = join(repoRoot, dir, "readme.md"); - if (await pathExists(readmeFile)) { - readmeFiles.add(join(dir, "readme.md")); - } - dir = dirname(dir); - } - } - - return [...readmeFiles]; -} - /** * Returns the service of a file path of the form "specification/" * @param filePath Path to a file of the form "specification//.../file.json" @@ -345,31 +263,51 @@ export function getService(filePath: string): string { } /** - * Given a root path and directory, find all files whose name ends with a given - * string. (e.g. ".json") - * @param rootPath Root path of starting directory - * @param dir Starting directory - * @param endsWith a file extension to search for - * @returns A list of files with paths relative to rootPath + * Return true if the path contains "/examples/" + * @param path + * @returns */ -async function enumerateFiles(rootPath: string, dir: string, endsWith: string): Promise { - let results: string[] = []; - let stack: string[] = [dir]; - - while (stack.length > 0) { - const currentDir = stack.pop()!; - const list = await readdir(join(rootPath, currentDir), { withFileTypes: true }); - - for (const file of list) { - if (file.isDirectory()) { - stack.push(join(currentDir, file.name)); - } else { - if (file.name.endsWith(endsWith)) { - results.push(join(currentDir, file.name)); - } - } +const excludeExamples = (path: string) => path.includes("/examples/"); + +/** + * Given a list of swagger files relative to before and after root paths, + * return a set of swagger files that have changed. Changes can be directly in + * the swagger file or in part of a referenced file that is included. Not all + * changes to a file in a $ref will affect a given swagger file. 
+ * @param beforeRoot + * @param afterRoot + * @param affectedSwaggerCandidates + * @returns + */ +export async function getChangedSwaggers( + beforeRoot: string, + afterRoot: string, + affectedSwaggerCandidates: Set, +) { + const affectedSwaggers = new Set(); + + for (const swagger of affectedSwaggerCandidates) { + const beforeSwagger = join(beforeRoot, swagger); + if (!(await pathExists(beforeSwagger))) { + affectedSwaggers.add(swagger); + continue; + } + + const afterSwagger = join(afterRoot, swagger); + + // Using dereference which supports excluding $ref paths (in this case, examples) + const derefBefore = await $RefParser.dereference(beforeSwagger, { + dereference: { excludedPathMatcher: excludeExamples }, + }); + const derefAfter = await $RefParser.dereference(afterSwagger, { + dereference: { excludedPathMatcher: excludeExamples }, + }); + + // Compare the dereferenced objects + if (!deepEqual(derefBefore, derefAfter)) { + affectedSwaggers.add(swagger); } } - return results; + return affectedSwaggers; } diff --git a/eng/tools/lint-diff/src/runChecks.ts b/eng/tools/lint-diff/src/runChecks.ts index 512fab0a8110..887ec894c124 100644 --- a/eng/tools/lint-diff/src/runChecks.ts +++ b/eng/tools/lint-diff/src/runChecks.ts @@ -1,24 +1,29 @@ import { join } from "path"; -import { exec, ExecException } from "node:child_process"; +import { execNpmExec, isExecError, ExecError } from "@azure-tools/specs-shared/exec"; +import { debugLogger } from "@azure-tools/specs-shared/logger"; +import { getPathToDependency, isFailure } from "./util.js"; +import { AutoRestMessage, AutorestRunResult } from "./lintdiff-types.js"; +import { ReadmeAffectedTags } from "./lintdiff-types.js"; import { getOpenapiType } from "./markdown-utils.js"; -import { getPathToDependency, AutorestRunResult } from "./util.js"; const MAX_EXEC_BUFFER = 64 * 1024 * 1024; +// AutoRest messages are JSON objects that start with the string '{"level":' +// Non-AutoRest messages will start with things like 
'{"pluginName"' +const AUTOREST_ERROR_PREFIX = '{"level":'; + export async function runChecks( path: string, - runList: Map, + runList: Map, ): Promise { const dependenciesDir = await getPathToDependency("@microsoft.azure/openapi-validator"); const result: AutorestRunResult[] = []; for (const [readme, tags] of runList.entries()) { const changedFilePath = join(path, readme); - console.log(`Linting ${changedFilePath}`); - // TODO: Move this into getRunList - let openApiType = await getOpenapiType(changedFilePath); + let openApiType = await getOpenapiType(tags.readme); // From momentOfTruth.ts:executeAutoRestWithLintDiff // This is a quick workaround for https://github.com/Azure/azure-sdk-tools/issues/6549 @@ -28,56 +33,91 @@ export async function runChecks( let openApiSubType = openApiType; // If the tags array is empty run the loop once but with a null tag - const coalescedTags = tags?.length ? tags : [null]; + const coalescedTags = tags.changedTags?.size ? [...tags.changedTags] : [null]; for (const tag of coalescedTags) { - let tagArg = tag ? `--tag=${tag} ` : ""; - - let autorestCommand = - `npm exec --no -- autorest ` + - `--v3 ` + - `--spectral ` + - `--azure-validator ` + - `--semantic-validator=false ` + - `--model-validator=false ` + - `--message-format=json ` + - `--openapi-type=${openApiType} ` + - `--openapi-subtype=${openApiSubType} ` + - `--use=${dependenciesDir} ` + - `${tagArg} ` + - `${changedFilePath}`; - - console.log(`autorest command: ${autorestCommand}`); - const executionResult = await executeCommand(autorestCommand); - const lintDiffResult = { - autorestCommand, - rootPath: path, - readme, - tag: tag ? 
tag : "", - openApiType, - ...executionResult, - }; - logAutorestExecutionErrors(lintDiffResult); + console.log(`::group::Autorest for type: ${openApiType} readme: ${readme} tag: ${tag}`); + + const autorestArgs = [ + "autorest", + "--v3", + "--spectral", + "--azure-validator", + "--semantic-validator=false", + "--model-validator=false", + "--message-format=json", + `--openapi-type=${openApiType}`, + `--openapi-subtype=${openApiSubType}`, + `--use=${dependenciesDir}`, + ]; + + if (tag) { + autorestArgs.push(`--tag=${tag}`); + } + autorestArgs.push(changedFilePath); + const autorestCommand = `npm exec -- ${autorestArgs.join(" ")}`; + console.log(`\tAutorest command: ${autorestCommand}`); + + let lintDiffResult: AutorestRunResult; + try { + const executionResult = await execNpmExec(autorestArgs, { + maxBuffer: MAX_EXEC_BUFFER, + logger: debugLogger, + }); + + lintDiffResult = { + autorestCommand, + rootPath: path, + readme: tags.readme, + tag: tag ? tag : "", + openApiType, + error: null, + ...executionResult, + } as AutorestRunResult; + } catch (error) { + if (!isExecError(error)) { + throw error; + } + + const execError = error as ExecError; + lintDiffResult = { + autorestCommand, + rootPath: path, + readme: tags.readme, + tag: tag ? 
tag : "", + openApiType, + error: execError, + stdout: execError.stdout || "", + stderr: execError.stderr || "", + } as AutorestRunResult; + + logAutorestExecutionErrors(lintDiffResult); + } + console.log("::endgroup::"); + result.push(lintDiffResult); - console.log("Lint diff result length:", lintDiffResult.stdout.length); + console.log( + `\tAutorest result length: ${lintDiffResult.stderr.length + lintDiffResult.stdout.length}\n`, + ); } } return result; } -export async function executeCommand( - command: string, - cwd: string = ".", -): Promise<{ error: ExecException | null; stdout: string; stderr: string }> { - return new Promise((resolve) => { - exec( - command, - { cwd, encoding: "utf-8", maxBuffer: MAX_EXEC_BUFFER }, - (error, stdout, stderr) => { - resolve({ error, stdout, stderr }); - }, - ); - }); +export function getAutorestErrors(runResult: AutorestRunResult): AutoRestMessage[] { + const errors = []; + const lines = (runResult.stdout + runResult.stderr).split("\n").map((line) => line.trim()); + + for (const line of lines) { + if (line.startsWith(AUTOREST_ERROR_PREFIX)) { + const error = JSON.parse(line) as AutoRestMessage; + if (isFailure(error.level)) { + errors.push(error); + } + } + } + + return errors; } export function logAutorestExecutionErrors(runResult: AutorestRunResult) { @@ -88,19 +128,14 @@ export function logAutorestExecutionErrors(runResult: AutorestRunResult) { const stderrContainsLevelError = runResult.stderr.includes(`${autoRestPrefix}"level":"error"`); const stderrContainsLevelFatal = runResult.stderr.includes(`${autoRestPrefix}"level":"fatal"`); - // TODO: Clean up output formatting to be consistent with new output standards - console.log( - `Execution of AutoRest with LintDiff done. 
` + - `Error is not null: true, ` + - `stdout contains AutoRest 'error': ${stdoutContainsLevelError}, ` + - `stdout contains AutoRest 'fatal': ${stdoutContainsLevelFatal}, ` + - `stderr contains AutoRest 'error': ${stderrContainsLevelError}, ` + - `stderr contains AutoRest 'fatal': ${stderrContainsLevelFatal}`, - ); - } else { - // TODO: Include markdown type? console.log( - `::debug:: Execution completed with no errors for tag: ${runResult.tag}, markdown: ${runResult.readme}, rootPath: ${runResult.rootPath}`, + `\tAutorest completed with errors: +\t\tExit code: ${runResult.error.code} +\t\tError is not null: true, +\t\tstdout contains AutoRest 'error': ${stdoutContainsLevelError} +\t\tstdout contains AutoRest 'fatal': ${stdoutContainsLevelFatal} +\t\tstderr contains AutoRest 'error': ${stderrContainsLevelError} +\t\tstderr contains AutoRest 'fatal': ${stderrContainsLevelFatal}`, ); } } diff --git a/eng/tools/lint-diff/src/util.ts b/eng/tools/lint-diff/src/util.ts index c6d2c9827136..e9c629ca26de 100644 --- a/eng/tools/lint-diff/src/util.ts +++ b/eng/tools/lint-diff/src/util.ts @@ -1,7 +1,6 @@ import { access, constants, readFile } from "node:fs/promises"; -import { dirname, join } from "node:path"; -import { fileURLToPath } from "node:url"; -import { ExecException } from "node:child_process"; +import { dirname, join } from "path"; +import { fileURLToPath } from "url"; /** * Enumerate files in a directory that match the given string ending @@ -27,27 +26,6 @@ export async function pathExists(path: string): Promise { } } -// TODO: Reduce to minimal set of properties -export type AutorestRunResult = { - rootPath: string; - - readme: string; - tag: string; - - error: ExecException | null; - stdout: string; - stderr: string; -}; - -export interface AutoRestMessage { - level: "information" | "warning" | "error" | "debug" | "verbose" | "fatal"; - code?: any; - message: string; - readme?: string; - tag?: string; - groupName?: string; -} - // Ignorting test coverage for 
these utility functions that are specific to // the engineering setup. /* v8 ignore start */ @@ -87,3 +65,25 @@ export async function getPathToDependency(dependency: string): Promise { } } /* v8 ignore stop */ + +/** + * Normalize a path to be relative to a given directory. + * @param path File path with / separators (typically given in swagger $refs) + * @param from A directory name to treat as the root (e.g. /specification/) + */ +export function relativizePath(path: string, from: string = `/specification/`): string { + const indexOfBy = path.lastIndexOf(from); + if (indexOfBy === -1) { + return path; + } + + return path.substring(indexOfBy); +} + +export function isFailure(level: string) { + return ["error", "fatal"].includes(level.toLowerCase()); +} + +export function isWarning(level: string) { + return level.toLowerCase() === "warning"; +} diff --git a/eng/tools/lint-diff/test/correlateResults.test.ts b/eng/tools/lint-diff/test/correlateResults.test.ts new file mode 100644 index 000000000000..628cbf7f6951 --- /dev/null +++ b/eng/tools/lint-diff/test/correlateResults.test.ts @@ -0,0 +1,531 @@ +import { test, describe, expect } from "vitest"; + +import { + AutorestRunResult, + LintDiffViolation, + Source, + BeforeAfter, +} from "../src/lintdiff-types.js"; +import { + correlateRuns, + getViolations, + getLintDiffViolations, + arrayIsEqual, + getNewItems, + isSameSources, +} from "../src/correlateResults.js"; +import { relativizePath } from "../src/util.js"; +import { isWindows } from "./test-util.js"; +import { Readme } from "@azure-tools/specs-shared/readme"; +import { resolve } from "path"; + +const __dirname = new URL(".", import.meta.url).pathname; + +describe.skipIf(isWindows())("correlateRuns", () => { + test("correlates before and after runs with matching readme and tag", async () => { + const fixtureRoot = resolve(__dirname, "fixtures/correlateRuns"); + const beforePath = resolve(fixtureRoot, "before"); + const afterPath = resolve(fixtureRoot, "after"); + 
+ const beforeChecks: AutorestRunResult[] = [ + { + rootPath: beforePath, + readme: new Readme( + resolve(beforePath, "specification/service1/resource-manager/readme.md"), + ), + tag: "tag1", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const afterChecks: AutorestRunResult[] = [ + { + rootPath: afterPath, + readme: new Readme(resolve(afterPath, "specification/service1/resource-manager/readme.md")), + tag: "tag1", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const result = await correlateRuns(beforePath, beforeChecks, afterChecks); + expect(result.size).toEqual(1); + expect(result.get("specification/service1/resource-manager/readme.md#tag1")).toMatchObject({ + before: beforeChecks[0], + after: afterChecks[0], + }); + }); + + test("correlates before and after runs with matching readme and a default tag", async () => { + const fixtureRoot = resolve(__dirname, "fixtures/correlateRuns"); + const beforePath = resolve(fixtureRoot, "before"); + const afterPath = resolve(fixtureRoot, "after"); + + const beforeChecks: AutorestRunResult[] = [ + { + rootPath: beforePath, + readme: new Readme( + resolve(beforePath, "specification/service1/resource-manager/readme.md"), + ), + tag: "default-tag", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const afterChecks: AutorestRunResult[] = [ + { + rootPath: afterPath, + readme: new Readme(resolve(afterPath, "specification/service1/resource-manager/readme.md")), + tag: "tag1", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const result = await correlateRuns(beforePath, beforeChecks, afterChecks); + expect(result.size).toEqual(1); + expect(result.get("specification/service1/resource-manager/readme.md#tag1")).toMatchObject({ + before: beforeChecks[0], + after: afterChecks[0], + }); + }); + + test("correlates before and after runs with matching readme but no tag", async () => { + const fixtureRoot = resolve(__dirname, "fixtures/correlateRuns"); + 
const beforePath = resolve(fixtureRoot, "before"); + const afterPath = resolve(fixtureRoot, "after"); + + const afterChecks: AutorestRunResult[] = [ + { + rootPath: afterPath, + readme: new Readme(resolve(afterPath, "specification/service1/resource-manager/readme.md")), + tag: "tag2", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const result = await correlateRuns(beforePath, [], afterChecks); + expect(result.size).toEqual(1); + expect(result.get("specification/service1/resource-manager/readme.md#tag2")).toMatchObject({ + before: null, + after: afterChecks[0], + }); + }); + + test("uses no baseline if there are no matching before checks", async () => { + const fixtureRoot = resolve(__dirname, "fixtures/correlateRuns"); + const beforePath = resolve(fixtureRoot, "before"); + const afterPath = resolve(fixtureRoot, "after"); + + const beforeChecks: AutorestRunResult[] = [ + { + rootPath: beforePath, + readme: new Readme( + resolve(beforePath, "specification/service1/resource-manager/readme.md"), + ), + tag: "", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const afterChecks: AutorestRunResult[] = [ + { + rootPath: afterPath, + readme: new Readme(resolve(afterPath, "specification/service1/resource-manager/readme.md")), + tag: "tag2", + stdout: "stdout", + stderr: "stderr", + error: null, + }, + ]; + + const result = await correlateRuns(beforePath, beforeChecks, afterChecks); + expect(result.size).toEqual(1); + expect(result.get("specification/service1/resource-manager/readme.md#tag2")).toMatchObject({ + before: beforeChecks[0], + after: afterChecks[0], + }); + }); +}); + +describe("getViolations", () => { + test("returns a result", () => { + const newError = `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"error","message":"Collection object returned by list operation 'RedisEnterprise_ListSkusForScaling' with 'x-ms-pageable' extension, has no property named 
'value'.","code":"CollectionObjectPropertiesNaming","details":{"jsonpath":["paths","/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redisEnterprise/{clusterName}/listSkusForScaling","post","responses","200","schema"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"","range":{"start":{"line":1245,"column":21},"end":{"line":1246,"column":52}}},"source":[{"document":"file:///mnt/vss/_work/1/azure-rest-api-specs/specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json","position":{"line":1245,"column":13}}]}`; + const existingErrorInBefore = `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"error","message":"Properties of a PATCH request body must not be required, property:name.","code":"PatchBodyParametersSchema","details":{"jsonpath":["paths","/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redisEnterprise/{clusterName}","patch","parameters","2","schema","properties","sku"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"RPC-Patch-V1-10","range":{"start":{"line":1,"column":0},"end":{"line":1,"column":0}}},"source":[{"document":"file:///mnt/vss/_work/1/lint-c93b354fd9c14905bb574a8834c4d69b/specification/redisenterprise/resource-manager/Microsoft.Cache/stable/2025-04-01/redisenterprise.json","position":{"line":201,"column":13}}]}`; + const correlatedErrorInAfter = ` {"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"error","message":"Properties of a PATCH request body must not be required, 
property:name.","code":"PatchBodyParametersSchema","details":{"jsonpath":["paths","/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/redisEnterprise/{clusterName}","patch","parameters","2","schema","properties","sku"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"RPC-Patch-V1-10","range":{"start":{"line":1,"column":0},"end":{"line":1,"column":0}}},"source":[{"document":"file:///mnt/vss/_work/1/azure-rest-api-specs/specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json","position":{"line":201,"column":13}}]}`; + + const affectedSwaggers = new Set([ + "specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json", + ]); + + const runCorrelations = new Map([ + [ + "specification/service1/resource-manager/readme.md#tag1", + { + before: { + rootPath: "before", + readme: new Readme("specification/service1/resource-manager/readme.md"), + tag: "tag1", + stdout: existingErrorInBefore, + stderr: "", + }, + after: { + rootPath: "after", + readme: new Readme("specification/service1/resource-manager/readme.md"), + tag: "tag1", + stdout: `${newError}\n${correlatedErrorInAfter}`, + stderr: "", + }, + } as BeforeAfter, + ], + ]); + + expect(getViolations(runCorrelations, affectedSwaggers)).toEqual([ + [ + expect.objectContaining({ + level: "error", + code: "CollectionObjectPropertiesNaming", + }), + ], + [ + expect.objectContaining({ + level: "error", + code: "PatchBodyParametersSchema", + }), + ], + ]); + }); + + test("correlates warnings with same basename", () => { + const beforeViolation = `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"warning","message":"Use the latest version v6 of 
types.json.","code":"LatestVersionOfCommonTypesMustBeUsed","details":{"jsonpath":["paths","/providers/Microsoft.Cache/operations","get","parameters","0","$ref"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"","range":{"start":{"line":51,"column":20},"end":{"line":51,"column":115}}},"source":[{"document":"file:///mnt/vss/_work/1/lint-c93b354fd9c14905bb574a8834c4d69b/specification/redisenterprise/resource-manager/Microsoft.Cache/stable/2025-04-01/redisenterprise.json","position":{"line":51,"column":13}}]}`; + const afterViolation = `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"warning","message":"Use the latest version v6 of types.json.","code":"LatestVersionOfCommonTypesMustBeUsed","details":{"jsonpath":["paths","/providers/Microsoft.Cache/operations","get","parameters","0","$ref"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"","range":{"start":{"line":51,"column":20},"end":{"line":51,"column":115}}},"source":[{"document":"file:///mnt/vss/_work/1/azure-rest-api-specs/specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json","position":{"line":51,"column":13}}]}`; + const runCorrelations = new Map([ + [ + "specification/service1/resource-manager/readme.md#tag1", + { + before: { + rootPath: "before", + readme: new Readme("specification/service1/resource-manager/readme.md"), + tag: "tag1", + stdout: beforeViolation, + stderr: "", + }, + after: { + rootPath: "after", + readme: new Readme("specification/service1/resource-manager/readme.md"), + tag: "tag1", + stdout: afterViolation, + stderr: "", + }, + } as BeforeAfter, + ], + ]); + + const affectedSwaggers = new Set([ + "specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json", + ]); + + expect(getViolations(runCorrelations, affectedSwaggers)).toEqual([ + [], + [ + 
expect.objectContaining({ + level: "warning", + code: "LatestVersionOfCommonTypesMustBeUsed", + }), + ], + ]); + }); + + test("handles empty beforeViolations", () => { + const afterViolation = `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"warning","message":"Use the latest version v6 of types.json.","code":"LatestVersionOfCommonTypesMustBeUsed","details":{"jsonpath":["paths","/providers/Microsoft.Cache/operations","get","parameters","0","$ref"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"","range":{"start":{"line":51,"column":20},"end":{"line":51,"column":115}}},"source":[{"document":"file:///mnt/vss/_work/1/azure-rest-api-specs/specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json","position":{"line":51,"column":13}}]}`; + + const runCorrelations = new Map([ + [ + "specification/service1/resource-manager/readme.md#tag1", + { + before: null, + after: { + rootPath: "after", + readme: new Readme("specification/service1/resource-manager/readme.md"), + tag: "tag1", + stdout: afterViolation, + stderr: "", + }, + } as BeforeAfter, + ], + ]); + + const affectedSwaggers = new Set([ + "specification/redisenterprise/resource-manager/Microsoft.Cache/preview/2025-05-01-preview/redisenterprise.json", + ]); + + expect(getViolations(runCorrelations, affectedSwaggers)).toEqual([ + [ + expect.objectContaining({ + level: "warning", + code: "LatestVersionOfCommonTypesMustBeUsed", + }), + ], + [], + ]); + }); +}); + +describe("isSameSources", () => { + test("returns true when sources are the same", () => { + const a: Source[] = [{ document: "path/to/document1.json" } as Source]; + const b: Source[] = [{ document: "a/different/path/to/document1.json" } as Source]; + + expect(isSameSources(a, b)).toEqual(true); + }); + + test("returns true when one source is empty", () => { + const a: Source[] = [{ document: "path/to/document1.json" } as 
Source]; + const b: Source[] = []; + + expect(isSameSources(a, b)).toEqual(true); + }); +}); + +describe("getLintDiffViolations", async () => { + function createRunResult(stdout: string, stderr: string = ""): AutorestRunResult { + return { + rootPath: "string", + readme: new Readme("string"), + tag: "string", + error: null, + stdout: stdout, + stderr: stderr, + }; + } + + test("treats fatal errors as errors", () => { + const runResult = createRunResult( + `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"fatal","message":"openapiValidatorPluginFunc: Failed validating: TypeError: azure-openapi-validator/core/src/runner.ts/LintRunner.runRules/processRule error. ruleName: RequiredPropertiesMissingInResourceModel, specFilePath: file:///mnt/vss/_work/1/azure-rest-api-specs/specification/monitor/resource-manager/Microsoft.Insights/stable/2018-01-01/metrics_API.json, jsonPath: , errorName: TypeError, errorMessage: Cannot read properties of undefined (reading 'readOnly')"}`, + ); + const violations = getLintDiffViolations(runResult); + + expect(violations.length).toEqual(1); + }); + + test("returns an empty array on no interesting violations", () => { + const runResult = + createRunResult(`{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Validating OpenAPI spec. TypeSpec-generated: true. 
Path: 'file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json'"} +{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"openapiValidatorPluginFunc: Return"}`); + + const violations = getLintDiffViolations(runResult); + expect(violations).toEqual([]); + }); + + test("returns an error on an interesting violation", () => { + const runResult = + createRunResult(`{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Validating OpenAPI spec. TypeSpec-generated: true. Path: 'file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json'"} +{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"error","message":"Top level property names should not be repeated inside the properties bag for ARM resource 'CodeSigningAccount'. Properties [properties.sku] conflict with ARM top level properties. 
Please rename these.","code":"ArmResourcePropertiesBag","details":{"jsonpath":["definitions","CodeSigningAccount"],"validationCategory":"ARMViolation","providerNamespace":false,"resourceType":false,"range":{"start":{"line":1036,"column":27},"end":{"line":1051,"column":6}}},"source":[{"document":"file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json","position":{"line":1036,"column":5}}]} +{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"openapiValidatorPluginFunc: Return"}`); + + const violations = getLintDiffViolations(runResult); + expect(violations.length).toEqual(1); + expect(violations[0].level).toEqual("error"); + expect(violations[0].code).toEqual("ArmResourcePropertiesBag"); + }); + + test("returns an empty array on violations that don't have extensionname @microsoft.azure/openapi-validator", () => { + const runResult = + createRunResult(`{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Validating OpenAPI spec. TypeSpec-generated: true. Path: 'file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json'"} +{"pluginName":"spectral","extensionName":"THIS IS FILTERED OUT","level":"error","message":"Top level property names should not be repeated inside the properties bag for ARM resource 'CodeSigningAccount'. Properties [properties.sku] conflict with ARM top level properties. 
Please rename these.","code":"ArmResourcePropertiesBag","details":{"jsonpath":["definitions","CodeSigningAccount"],"validationCategory":"ARMViolation","providerNamespace":false,"resourceType":false,"range":{"start":{"line":1036,"column":27},"end":{"line":1051,"column":6}}},"source":[{"document":"file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json","position":{"line":1036,"column":5}}]} +{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"openapiValidatorPluginFunc: Return"}`); + + const violations = getLintDiffViolations(runResult); + expect(violations).toEqual([]); + }); + + test("returns a violation with code FATAL if the result.code is undefined", () => { + const runResult = createRunResult( + `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","message": "test message with no code"}`, + ); + const violations = getLintDiffViolations(runResult); + expect(violations[0].code).toEqual("FATAL"); + }); +}); + +describe("arrayIsEqual", () => { + test("returns true for equal arrays", async () => { + const a = ["a", "b", "c"]; + const b = ["a", "b", "c"]; + + const result = arrayIsEqual(a, b); + expect(result).toEqual(true); + }); + + test("returns false for different arrays", async () => { + const a = ["a", "b", "c"]; + const b = ["a", "b", "d"]; + + const result = arrayIsEqual(a, b); + expect(result).toEqual(false); + }); + + test("returns false for different lengths", async () => { + const a = ["a", "b", "c"]; + const b = ["a", "b"]; + + const result = arrayIsEqual(a, b); + expect(result).toEqual(false); + }); + + test("returns true for empty arrays", async () => { + const a: string[] = []; + const b: string[] = []; + + const result = arrayIsEqual(a, b); + expect(result).toEqual(true); + }); + + test("returns true for equal arrays with different types", async () => { + const a = ["a", 1, 
"c"]; + const b = ["a", 1, "c"]; + + const result = arrayIsEqual(a, b); + expect(result).toEqual(true); + }); +}); + +describe("getNewItems", () => { + test("returns empty array when no before or after", () => { + const before: LintDiffViolation[] = []; + const after: LintDiffViolation[] = []; + + const result = getNewItems(before, after); + expect(result).toEqual([[], []]); + }); + + test("a fatal error is always new", () => { + const before = [ + { + level: "fatal", + code: "SomeCode1", + message: "Some Message", + source: [ + { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: {}, + } as LintDiffViolation, + ]; + const after = [ + { + level: "fatal", + code: "SomeCode1", + message: "Some Message", + source: [ + { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: {}, + } as LintDiffViolation, + ]; + + const result = getNewItems(before, after); + expect(result).toEqual([after, []]); + }); + + test("returns all after items when no before", () => { + const before: LintDiffViolation[] = []; + const after = [ + { + level: "error", + code: "SomeCode1", + message: "Some Message", + source: [ + { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: {}, + } as LintDiffViolation, + { + level: "error", + code: "SomeCode2", + message: "Some Message", + source: [ + { document: "path/to/document2.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: {}, + } as LintDiffViolation, + ]; + + const result = getNewItems(before, after); + expect(result).toEqual([after, []]); + }); + + test("returns only new errors", () => { + const before: LintDiffViolation[] = [ + { + level: "error", + code: "SomeCode1", + message: "Some Message", + source: [ + { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: { + jsonpath: ["some", "path"], + }, + } as LintDiffViolation, + ]; + const after = [ + 
{ + level: "error", + code: "SomeCode1", + message: "Some Message", + source: [ + { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: { + jsonpath: ["some", "path"], + }, + } as LintDiffViolation, + { + level: "error", + code: "SomeCode2", + message: "Some Message", + source: [ + { document: "path/to/document2.json", position: { line: 1, colomn: 1 } } as Source, + ], + details: { + jsonpath: ["some", "path"], + }, + } as LintDiffViolation, + ]; + + const result = getNewItems(before, after); + expect(result).toEqual([after.slice(1), before]); + }); +}); + +describe("relativizePath", () => { + test.skipIf(isWindows()).sequential("relativizes path correctly", () => { + expect(relativizePath("/path/to/specification/service/file.json")).toEqual( + "/specification/service/file.json", + ); + }); + + test("returns the same path if it doesn't include from", () => { + expect(relativizePath("/path/to/other/file.json")).toEqual("/path/to/other/file.json"); + }); + + test("returns empty string when path is empty", () => { + expect(relativizePath("")).toEqual(""); + }); + + test.skipIf(isWindows()).sequential("uses the last instance of from", () => { + expect( + relativizePath("/path/to/specification/another/specification/service/file.json"), + ).toEqual("/specification/service/file.json"); + }); +}); diff --git a/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/data-plane/swagger-preview.json b/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/data-plane/swagger-preview.json new file mode 100644 index 000000000000..b22a33bb8b49 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/data-plane/swagger-preview.json @@ -0,0 +1,525 @@ +{ + "swagger": "2.0", + "info": { + "title": "Contoso Widget Manager with a change that LintDiff will recognize", + "version": "2022-12-01", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" 
+ } + ] + }, + "schemes": [ + "https" + ], + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "AadOauth2Auth": [ + "https://contoso.azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AadOauth2Auth": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "https://contoso.azure.com/.default": "" + }, + "tokenUrl": "https://login.microsoftonline.com/common/oauth2/token" + } + }, + "tags": [], + "paths": { + "/widgets": { + "get": { + "operationId": "Widgets_ListWidgets", + "description": "List Widget resources", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/PagedWidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/widgets/{widgetName}": { + "get": { + "operationId": "Widgets_GetWidget", + "description": "Fetch a Widget by name.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + } + }, + "patch": { + "operationId": "Widgets_CreateOrUpdateWidget", + "description": "Creates or updates a Widget asynchronously.", + "consumes": [ + "application/merge-patch+json" + ], + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "resource", + "in": "body", + "description": "The resource instance.", + "required": true, + "schema": { + "$ref": "#/definitions/WidgetSuiteCreateOrUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." 
+ } + } + }, + "201": { + "description": "The request has succeeded and a new resource has been created as a result.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "operationId": "Widgets_DeleteWidget", + "description": "Delete a Widget asynchronously.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "202": { + "description": "The request has been accepted for processing, but processing has not yet completed.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + } + }, + "required": [ + "id", + "status" + ] + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/widgets/{widgetName}/operations/{operationId}": { + "get": { + "operationId": "Widgets_GetWidgetOperationStatus", + "description": "Gets status of a Widget operation.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "operationId", + "in": "path", + "description": "The unique ID of the operation.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + }, + "result": { + "$ref": "#/definitions/WidgetSuite", + "description": "The result of the operation." + } + }, + "required": [ + "id", + "status" + ] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + } + } + } + }, + "definitions": { + "Azure.Core.Foundations.Error": { + "type": "object", + "description": "The error object.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + }, + "target": { + "type": "string", + "description": "The target of the error." + }, + "details": { + "type": "array", + "description": "An array of details about specific errors that led to this reported error.", + "items": { + "$ref": "#/definitions/Azure.Core.Foundations.Error" + }, + "x-ms-identifiers": [] + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "An object containing more specific information than the current object about the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "Azure.Core.Foundations.ErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "Azure.Core.Foundations.InnerError": { + "type": "object", + "description": "An object containing more specific information about the error. As per Microsoft One API guidelines - https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md#handling-errors.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "Inner error." 
+ } + } + }, + "Azure.Core.Foundations.OperationState": { + "type": "string", + "description": "Enum describing allowed operation states.", + "enum": [ + "NotStarted", + "Running", + "Succeeded", + "Failed", + "Canceled" + ], + "x-ms-enum": { + "name": "OperationState", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "The operation has not started." + }, + { + "name": "Running", + "value": "Running", + "description": "The operation is in progress." + }, + { + "name": "Succeeded", + "value": "Succeeded", + "description": "The operation has completed successfully." + }, + { + "name": "Failed", + "value": "Failed", + "description": "The operation has failed." + }, + { + "name": "Canceled", + "value": "Canceled", + "description": "The operation has been canceled by the user." + } + ] + } + }, + "FakedSharedModel": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." + } + }, + "required": [ + "tag", + "createdAt" + ] + }, + "FakedSharedModelCreateOrUpdate": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." 
+ } + } + }, + "PagedWidgetSuite": { + "type": "object", + "description": "Paged collection of WidgetSuite items", + "properties": { + "value": { + "type": "array", + "description": "The WidgetSuite items on this page", + "items": { + "$ref": "#/definitions/WidgetSuite" + }, + "x-ms-identifiers": [] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "WidgetSuite": { + "type": "object", + "description": "A widget.", + "properties": { + "name": { + "type": "string", + "description": "The widget name.", + "readOnly": true + }, + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModel", + "description": "The faked shared model." + } + }, + "required": [ + "name", + "manufacturerId" + ] + }, + "WidgetSuiteCreateOrUpdate": { + "type": "object", + "description": "A widget.", + "properties": { + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModelCreateOrUpdate", + "description": "The faked shared model." 
+ } + } + } + }, + "parameters": { + "Azure.Core.Foundations.ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-client-name": "apiVersion" + } + } +} diff --git a/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/data-plane/swagger.json b/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/data-plane/swagger.json new file mode 100644 index 000000000000..b22a33bb8b49 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/data-plane/swagger.json @@ -0,0 +1,525 @@ +{ + "swagger": "2.0", + "info": { + "title": "Contoso Widget Manager with a change that LintDiff will recognize", + "version": "2022-12-01", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "AadOauth2Auth": [ + "https://contoso.azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AadOauth2Auth": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "https://contoso.azure.com/.default": "" + }, + "tokenUrl": "https://login.microsoftonline.com/common/oauth2/token" + } + }, + "tags": [], + "paths": { + "/widgets": { + "get": { + "operationId": "Widgets_ListWidgets", + "description": "List Widget resources", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/PagedWidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": 
{ + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/widgets/{widgetName}": { + "get": { + "operationId": "Widgets_GetWidget", + "description": "Fetch a Widget by name.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + } + }, + "patch": { + "operationId": "Widgets_CreateOrUpdateWidget", + "description": "Creates or updates a Widget asynchronously.", + "consumes": [ + "application/merge-patch+json" + ], + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "resource", + "in": "body", + "description": "The resource instance.", + "required": true, + "schema": { + "$ref": "#/definitions/WidgetSuiteCreateOrUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." 
+ } + } + }, + "201": { + "description": "The request has succeeded and a new resource has been created as a result.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "operationId": "Widgets_DeleteWidget", + "description": "Delete a Widget asynchronously.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "202": { + "description": "The request has been accepted for processing, but processing has not yet completed.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + } + }, + "required": [ + "id", + "status" + ] + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/widgets/{widgetName}/operations/{operationId}": { + "get": { + "operationId": "Widgets_GetWidgetOperationStatus", + "description": "Gets status of a Widget operation.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "operationId", + "in": "path", + "description": "The unique ID of the operation.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + }, + "result": { + "$ref": "#/definitions/WidgetSuite", + "description": "The result of the operation." + } + }, + "required": [ + "id", + "status" + ] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + } + } + } + }, + "definitions": { + "Azure.Core.Foundations.Error": { + "type": "object", + "description": "The error object.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + }, + "target": { + "type": "string", + "description": "The target of the error." + }, + "details": { + "type": "array", + "description": "An array of details about specific errors that led to this reported error.", + "items": { + "$ref": "#/definitions/Azure.Core.Foundations.Error" + }, + "x-ms-identifiers": [] + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "An object containing more specific information than the current object about the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "Azure.Core.Foundations.ErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "Azure.Core.Foundations.InnerError": { + "type": "object", + "description": "An object containing more specific information about the error. As per Microsoft One API guidelines - https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md#handling-errors.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "Inner error." 
+ } + } + }, + "Azure.Core.Foundations.OperationState": { + "type": "string", + "description": "Enum describing allowed operation states.", + "enum": [ + "NotStarted", + "Running", + "Succeeded", + "Failed", + "Canceled" + ], + "x-ms-enum": { + "name": "OperationState", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "The operation has not started." + }, + { + "name": "Running", + "value": "Running", + "description": "The operation is in progress." + }, + { + "name": "Succeeded", + "value": "Succeeded", + "description": "The operation has completed successfully." + }, + { + "name": "Failed", + "value": "Failed", + "description": "The operation has failed." + }, + { + "name": "Canceled", + "value": "Canceled", + "description": "The operation has been canceled by the user." + } + ] + } + }, + "FakedSharedModel": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." + } + }, + "required": [ + "tag", + "createdAt" + ] + }, + "FakedSharedModelCreateOrUpdate": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." 
+ } + } + }, + "PagedWidgetSuite": { + "type": "object", + "description": "Paged collection of WidgetSuite items", + "properties": { + "value": { + "type": "array", + "description": "The WidgetSuite items on this page", + "items": { + "$ref": "#/definitions/WidgetSuite" + }, + "x-ms-identifiers": [] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "WidgetSuite": { + "type": "object", + "description": "A widget.", + "properties": { + "name": { + "type": "string", + "description": "The widget name.", + "readOnly": true + }, + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModel", + "description": "The faked shared model." + } + }, + "required": [ + "name", + "manufacturerId" + ] + }, + "WidgetSuiteCreateOrUpdate": { + "type": "object", + "description": "A widget.", + "properties": { + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModelCreateOrUpdate", + "description": "The faked shared model." 
+ } + } + } + }, + "parameters": { + "Azure.Core.Foundations.ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-client-name": "apiVersion" + } + } +} diff --git a/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/readme.md b/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/readme.md new file mode 100644 index 000000000000..63ba76d2f138 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/buildState/specification/edit-in-place/readme.md @@ -0,0 +1,24 @@ +# Contoso.WidgetManager + +## Configuration + +### Basic Information + +```yaml +openapi-type: data-plane +tag: package-2022-12-01 +``` + +### Tag: package-2022-12-01 + +These settings apply only when `--tag=package-2022-12-01` is specified on the command line. + +```yaml $(tag) == 'package-2022-12-01' +input-file: + - data-plane/swagger.json +``` + +```yaml $(tag) == 'package-2022-12-01-preview' +input-file: + - data-plane/swagger-preview.json +``` \ No newline at end of file diff --git a/eng/tools/lint-diff/test/fixtures/buildState/specification/no-input-file/readme.md b/eng/tools/lint-diff/test/fixtures/buildState/specification/no-input-file/readme.md new file mode 100644 index 000000000000..38902d5bade1 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/buildState/specification/no-input-file/readme.md @@ -0,0 +1,13 @@ +# Widget + +> see https://aka.ms/autorest +> This is the AutoRest configuration file for Widget. + +## Configuration + +Required if any services under this folder are RPaaS. 
+ +```yaml +openapi-type: arm +openapi-subtype: rpaas +``` \ No newline at end of file diff --git a/eng/tools/lint-diff/test/fixtures/correlateRuns/before/specification/service1/resource-manager/readme.md b/eng/tools/lint-diff/test/fixtures/correlateRuns/before/specification/service1/resource-manager/readme.md new file mode 100644 index 000000000000..05510c4aa772 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/correlateRuns/before/specification/service1/resource-manager/readme.md @@ -0,0 +1,12 @@ +# Test service + +## Configuration + +Minimal file to test correlateRuns functionality. + +### Basic Information + +```yaml +openapi-type: resource-manager +tag: default-tag +``` diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/changed-dependency.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/changed-dependency.json new file mode 100644 index 000000000000..d1560d9e5521 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/changed-dependency.json @@ -0,0 +1,22 @@ +{ + "swagger": "2.0", + "info": { + "title": "Changed Dependency", + "version": "1.0.0" + }, + "definitions": { + "Item": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "valueInt": { + "type": "integer", + "format": "int32" + } + }, + "required": ["name", "valueInt"] + } + } +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/different.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/different.json new file mode 100644 index 000000000000..f0d8395862a6 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/different.json @@ -0,0 +1,8 @@ +{ + "swagger": "2.0", + "info": { + "title": "Simple API WITH CHANGE", + "version": "1.0.0" + }, + "paths": {} +} diff --git 
a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/file1.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/file1.json new file mode 100644 index 000000000000..5bdeba6dbdb7 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/file1.json @@ -0,0 +1,8 @@ +{ + "swagger": "2.0", + "info": { + "title": "Simple API", + "version": "1.0.0" + }, + "paths": {} +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/new-file.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/new-file.json new file mode 100644 index 000000000000..5bdeba6dbdb7 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/new-file.json @@ -0,0 +1,8 @@ +{ + "swagger": "2.0", + "info": { + "title": "Simple API", + "version": "1.0.0" + }, + "paths": {} +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/with-dependency.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/with-dependency.json new file mode 100644 index 000000000000..92dca58b467f --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/after/specification/service1/with-dependency.json @@ -0,0 +1,35 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service1 API", + "version": "1.0.0" + }, + "paths": { + "/items": { + "get": { + "summary": "Get items", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "type": "array", + "items": { + "$ref": "./changed-dependency.json#/definitions/Item" + } + } + } + } + } + } + }, + "definitions": { + "LocalType": { + "type": "object", + "properties": { + "id": { + "type": "string" + } + } + } + } +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/changed-dependency.json 
b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/changed-dependency.json new file mode 100644 index 000000000000..98f096334763 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/changed-dependency.json @@ -0,0 +1,22 @@ +{ + "swagger": "2.0", + "info": { + "title": "Changed Dependency", + "version": "1.0.0" + }, + "definitions": { + "Item": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "integer", + "format": "int32" + } + }, + "required": ["name", "value"] + } + } +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/different.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/different.json new file mode 100644 index 000000000000..5bdeba6dbdb7 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/different.json @@ -0,0 +1,8 @@ +{ + "swagger": "2.0", + "info": { + "title": "Simple API", + "version": "1.0.0" + }, + "paths": {} +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/file1.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/file1.json new file mode 100644 index 000000000000..5bdeba6dbdb7 --- /dev/null +++ b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/file1.json @@ -0,0 +1,8 @@ +{ + "swagger": "2.0", + "info": { + "title": "Simple API", + "version": "1.0.0" + }, + "paths": {} +} diff --git a/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/with-dependency.json b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/with-dependency.json new file mode 100644 index 000000000000..92dca58b467f --- /dev/null +++ 
b/eng/tools/lint-diff/test/fixtures/getChangedSwaggers/before/specification/service1/with-dependency.json @@ -0,0 +1,35 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service1 API", + "version": "1.0.0" + }, + "paths": { + "/items": { + "get": { + "summary": "Get items", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "type": "array", + "items": { + "$ref": "./changed-dependency.json#/definitions/Item" + } + } + } + } + } + } + }, + "definitions": { + "LocalType": { + "type": "object", + "properties": { + "id": { + "type": "string" + } + } + } + } +} diff --git a/eng/tools/lint-diff/test/fixtures/getInputFiles/readme.md b/eng/tools/lint-diff/test/fixtures/getInputFiles/readme.md deleted file mode 100644 index 67e7ad4c8caf..000000000000 --- a/eng/tools/lint-diff/test/fixtures/getInputFiles/readme.md +++ /dev/null @@ -1,30 +0,0 @@ -# Contoso.WidgetManager - -## Configuration - -Minimal file to test the getInputFiles function. - -### Basic Information - -```yaml -openapi-type: data-plane -tag: package-2022-12-01 -``` - -### Tag: package-2022-12-01 - -These settings apply only when `--tag=package-2022-12-01` is specified on the command line. - -```yaml $(tag) == 'package-2022-12-01' -input-file: - - Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json -``` - -### Tag: package-2022-11-01-preview - -These settings apply only when `--tag=package-2022-11-01-preview` is specified on the command line. - -```yaml $(tag) == 'package-2022-11-01-preview' -input-file: - - Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json -``` diff --git a/eng/tools/lint-diff/test/fixtures/getTagsAndInputFiles/readme.md b/eng/tools/lint-diff/test/fixtures/getTagsAndInputFiles/readme.md deleted file mode 100644 index 7106d594e875..000000000000 --- a/eng/tools/lint-diff/test/fixtures/getTagsAndInputFiles/readme.md +++ /dev/null @@ -1,250 +0,0 @@ -# AlertsManagement - -This sample was selected for multiple tags. 
- -> see https://aka.ms/autorest - -This is the AutoRest configuration file for AlertManagement. - ---- - -## Getting Started - -To build the SDK for AlertManagement, simply [Install AutoRest](https://aka.ms/autorest/install) and in this folder, run: - -> `autorest` - -To see additional help and options, run: - -> `autorest --help` - ---- - -## Configuration - -### Basic Information - -These are the global settings for the AlertManagement API. - -### Suppression -``` yaml -directive: - - suppress: R3025 - reason: The rule applied incorrectly to base class. - where: - - $.definitions.ManagedResource - - suppress: R3026 - reason: The rule applied incorrectly to base class. - where: - - $.definitions.ManagedResource - - suppress: TopLevelResourcesListBySubscription - reason: The list by scope includes also list by subscription, this is an extension resource. - from: AlertsManagement.json -``` - -``` yaml -title: AlertsManagementClient -description: AlertsManagement Client -openapi-type: arm -tag: package-2023-03 -``` - -### Tag: package-preview-2024-01 - -These settings apply only when `--tag=package-preview-2024-01` is specified on the command line. - -```yaml $(tag) == 'package-preview-2024-01' -input-file: - - Microsoft.AlertsManagement/preview/2024-01-01-preview/AlertsManagement.json -``` -### Tag: package-preview-2023-08 - -These settings apply only when `--tag=package-preview-2023-08` is specified on the command line. - -```yaml $(tag) == 'package-preview-2023-08' -input-file: - - Microsoft.AlertsManagement/preview/2023-08-01-preview/AlertRuleRecommendations.json -``` -### Tag: package-preview-2023-07 - -These settings apply only when `--tag=package-preview-2023-07` is specified on the command line. 
- -```yaml $(tag) == 'package-preview-2023-07' -input-file: - - Microsoft.AlertsManagement/preview/2023-07-12-preview/AlertsManagement.json -``` -### Tag: package-preview-2023-04 - -These settings apply only when `--tag=package-preview-2023-04` is specified on the command line. - -```yaml $(tag) == 'package-preview-2023-04' -input-file: - - Microsoft.AlertsManagement/preview/2023-04-01-preview/TenantActivityLogAlerts.json -``` -### Tag: package-preview-2023-01 - -These settings apply only when `--tag=package-preview-2023-01` is specified on the command line. - -```yaml $(tag) == 'package-preview-2023-01' -input-file: - - Microsoft.AlertsManagement/preview/2023-01-01-preview/AlertRuleRecommendations.json -``` -### Tag: package-2023-03 - -These settings apply only when `--tag=package-2023-03` is specified on the command line. - -```yaml $(tag) == 'package-2023-03' -input-file: - - Microsoft.AlertsManagement/stable/2023-03-01/PrometheusRuleGroups.json - - Microsoft.AlertsManagement/preview/2024-01-01-preview/AlertsManagement.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/SmartGroups.json - - Microsoft.AlertsManagement/preview/2023-08-01-preview/AlertRuleRecommendations.json - - Microsoft.AlertsManagement/preview/2021-08-08-preview/AlertProcessingRules.json -``` -### Tag: package-2021-08 - -These settings apply only when `--tag=package-2021-08` is specified on the command line. 
- -```yaml $(tag) == 'package-2021-08' -input-file: - - Microsoft.AlertsManagement/stable/2021-08-08/AlertProcessingRules.json - - Microsoft.AlertsManagement/preview/2021-07-22-preview/PrometheusRuleGroups.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/AlertsManagement.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/SmartGroups.json - - Microsoft.AlertsManagement/preview/2023-01-01-preview/AlertRuleRecommendations.json - - Microsoft.AlertsManagement/preview/2023-04-01-preview/TenantActivityLogAlerts.json -``` - -### Tag: package-preview-2021-08 - -These settings apply only when `--tag=package-preview-2021-08` is specified on the command line. - -```yaml $(tag) == 'package-preview-2021-08' -input-file: - - Microsoft.AlertsManagement/preview/2021-08-08-preview/AlertProcessingRules.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/AlertsManagement.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/SmartGroups.json -``` - -### Tag: package-preview-2021-07 - -These settings apply only when `--tag=package-preview-2021-07` is specified on the command line. - -```yaml $(tag) == 'package-preview-2021-07' -input-file: - - Microsoft.AlertsManagement/preview/2021-07-22-preview/PrometheusRuleGroups.json -``` - -### Tag: package-2021-04-only - -These settings apply only when `--tag=package-2021-04-only` is specified on the command line. - -```yaml $(tag) == 'package-2021-04-only' -input-file: - - Microsoft.AlertsManagement/stable/2019-03-01/AlertsManagement.json - - Microsoft.AlertsManagement/stable/2021-04-01/SmartDetectorAlertRulesApi.json -``` - -### Tag: package-preview-2021-01 - -These settings apply only when `--tag=package-preview-2021-01` is specified on the command line. 
- -```yaml $(tag) == 'package-preview-2021-01' -input-file: - - Microsoft.AlertsManagement/preview/2021-01-01-preview/AlertsManagement.json - - Microsoft.AlertsManagement/preview/2021-01-01-preview/MigrateFromSmartDetections.json -``` -### Tag: package-2019-06-preview - -These settings apply only when `--tag=package-2019-06-preview` is specified on the command line. - -```yaml $(tag) == 'package-2019-06-preview' -input-file: - - Microsoft.AlertsManagement/preview/2019-05-05-preview/ActionRules.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/AlertsManagement.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/SmartGroups.json - - Microsoft.AlertsManagement/stable/2019-06-01/SmartDetectorAlertRulesApi.json -``` - - -### Tag: package-2019-06 - -These settings apply only when `--tag=package-2019-06` is specified on the command line. - -```yaml $(tag) == 'package-2019-06' -input-file: - - Microsoft.AlertsManagement/stable/2019-06-01/SmartDetectorAlertRulesApi.json -``` - -### Tag: package-2019-03 - -These settings apply only when `--tag=package-2019-03` is specified on the command line. - -```yaml $(tag) == 'package-2019-03' -input-file: - - Microsoft.AlertsManagement/stable/2019-03-01/AlertsManagement.json - - Microsoft.AlertsManagement/stable/2019-03-01/SmartDetectorAlertRulesApi.json -``` - -### Tag: package-preview-2019-05 - -These settings apply only when `--tag=package-preview-2019-05` is specified on the command line. - -``` yaml $(tag) == 'package-preview-2019-05' -input-file: - - Microsoft.AlertsManagement/preview/2019-05-05-preview/ActionRules.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/AlertsManagement.json - - Microsoft.AlertsManagement/preview/2019-05-05-preview/SmartGroups.json -``` - -### Tag: package-2018-05 - -These settings apply only when `--tag=package-2018-05` is specified on the command line. 
- -``` yaml $(tag) == 'package-2018-05' -input-file: -- Microsoft.AlertsManagement/stable/2018-05-05/AlertsManagement.json -``` - -### Tag: package-2018-05-preview - -These settings apply only when `--tag=package-2018-05` is specified on the command line. - -``` yaml $(tag) == 'package-2018-05-preview' -input-file: -- Microsoft.AlertsManagement/preview/2018-05-05-preview/AlertsManagement.json -``` - ---- - -# Code Generation - -## Swagger to SDK - -This section describes what SDK should be generated by the automatic system. -This is not used by Autorest itself. - -``` yaml $(swagger-to-sdk) -swagger-to-sdk: - - repo: azure-sdk-for-net - - repo: azure-sdk-for-python - - repo: azure-libraries-for-java - - repo: azure-sdk-for-go - - repo: azure-sdk-for-trenton - - repo: azure-resource-manager-schemas - - repo: azure-powershell -``` - -## Python - -See configuration in [readme.python.md](./readme.python.md) - ---- - -## Go - -See configuration in [readme.go.md](./readme.go.md) - -## Java - -See configuration in [readme.java.md](./readme.java.md) diff --git a/eng/tools/lint-diff/test/generateReport.test.ts b/eng/tools/lint-diff/test/generateReport.test.ts index e995864c6eda..3ed6750bf938 100644 --- a/eng/tools/lint-diff/test/generateReport.test.ts +++ b/eng/tools/lint-diff/test/generateReport.test.ts @@ -1,257 +1,48 @@ -import { test, describe, expect } from "vitest"; - -import { AutorestRunResult } from "../src/util.js"; +import { beforeEach, test, describe, expect, vi } from "vitest"; import { - getLintDiffViolations, - arrayIsEqual, - isFailure, - isWarning, - LintDiffViolation, - getNewItems, - Source, - iconFor, - getLine, - getFile, - relativizePath, + compareLintDiffViolations, + generateAutoRestErrorReport, + generateLintDiffReport, getDocUrl, + getFile, getFileLink, + getLine, getPathSegment, - compareLintDiffViolations, + iconFor, } from "../src/generateReport.js"; +import { + Source, + LintDiffViolation, + BeforeAfter, + AutorestRunResult, + AutoRestMessage, 
+} from "../src/lintdiff-types.js"; import { isWindows } from "./test-util.js"; -describe("getLintDiffViolations", async () => { - function createRunResult(stdout: string, stderr: string = ""): AutorestRunResult { - return { - rootPath: "string", - readme: "string", - tag: "string", - error: null, - stdout: stdout, - stderr: stderr, - }; - } - - test.concurrent("returns an empty array on no interesting violations", ({ expect }) => { - const runResult = - createRunResult(`{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Validating OpenAPI spec. TypeSpec-generated: true. Path: 'file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json'"} -{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"openapiValidatorPluginFunc: Return"}`); - - const violations = getLintDiffViolations(runResult); - expect(violations).toEqual([]); - }); - - test.concurrent("returns an error on an interesting violation", ({ expect }) => { - const runResult = - createRunResult(`{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Validating OpenAPI spec. TypeSpec-generated: true. Path: 'file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json'"} -{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"error","message":"Top level property names should not be repeated inside the properties bag for ARM resource 'CodeSigningAccount'. Properties [properties.sku] conflict with ARM top level properties. 
Please rename these.","code":"ArmResourcePropertiesBag","details":{"jsonpath":["definitions","CodeSigningAccount"],"validationCategory":"ARMViolation","providerNamespace":false,"resourceType":false,"range":{"start":{"line":1036,"column":27},"end":{"line":1051,"column":6}}},"source":[{"document":"file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json","position":{"line":1036,"column":5}}]} -{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"openapiValidatorPluginFunc: Return"}`); - - const violations = getLintDiffViolations(runResult); - expect(violations.length).toEqual(1); - expect(violations[0].level).toEqual("error"); - expect(violations[0].code).toEqual("ArmResourcePropertiesBag"); - }); - - test.concurrent( - "returns an empty array on violations that don't have extensionname @microsoft.azure/openapi-validator", - ({ expect }) => { - const runResult = - createRunResult(`{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Validating OpenAPI spec. TypeSpec-generated: true. Path: 'file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json'"} -{"pluginName":"spectral","extensionName":"THIS IS FILTERED OUT","level":"error","message":"Top level property names should not be repeated inside the properties bag for ARM resource 'CodeSigningAccount'. Properties [properties.sku] conflict with ARM top level properties. 
Please rename these.","code":"ArmResourcePropertiesBag","details":{"jsonpath":["definitions","CodeSigningAccount"],"validationCategory":"ARMViolation","providerNamespace":false,"resourceType":false,"range":{"start":{"line":1036,"column":27},"end":{"line":1051,"column":6}}},"source":[{"document":"file:///home/djurek/azure-rest-api-specs/specification/codesigning/resource-manager/Microsoft.CodeSigning/stable/2025-03-30/codeSigningAccount.json","position":{"line":1036,"column":5}}]} -{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"openapiValidatorPluginFunc: Return"}`); - - const violations = getLintDiffViolations(runResult); - expect(violations).toEqual([]); - }, - ); - - test.concurrent("returns a violation with code FATAL if the result.code is undefined", () => { - const runResult = createRunResult( - `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","message": "test message with no code"}`, - ); - const violations = getLintDiffViolations(runResult); - expect(violations[0].code).toEqual("FATAL"); - }); -}); - -describe("arrayIsEqual", () => { - test.concurrent("returns true for equal arrays", async ({ expect }) => { - const a = ["a", "b", "c"]; - const b = ["a", "b", "c"]; +import { vol } from "memfs"; - const result = arrayIsEqual(a, b); - expect(result).toEqual(true); - }); - - test.concurrent("returns false for different arrays", async ({ expect }) => { - const a = ["a", "b", "c"]; - const b = ["a", "b", "d"]; - - const result = arrayIsEqual(a, b); - expect(result).toEqual(false); - }); - - test.concurrent("returns false for different lengths", async ({ expect }) => { - const a = ["a", "b", "c"]; - const b = ["a", "b"]; - - const result = arrayIsEqual(a, b); - expect(result).toEqual(false); - }); - - test.concurrent("returns true for empty arrays", async ({ expect }) => { - const a: string[] = []; - const b: string[] = []; - - const result = arrayIsEqual(a, b); - 
expect(result).toEqual(true); - }); - - test.concurrent("returns true for equal arrays with different types", async ({ expect }) => { - const a = ["a", 1, "c"]; - const b = ["a", 1, "c"]; - - const result = arrayIsEqual(a, b); - expect(result).toEqual(true); - }); +vi.mock("node:fs/promises", async () => { + const memfs = (await vi.importActual("memfs")) as typeof import("memfs"); + return { + ...memfs.fs.promises, + }; }); -describe("isFailure", () => { - // Data driven test - test.each([ - { level: "error", expected: true }, - { level: "fatal", expected: true }, - { level: "warning", expected: false }, - { level: "information", expected: false }, - { level: "info", expected: false }, - ])(`isFailure($level) returns $expected`, ({ level, expected }) => { - expect(isFailure(level)).toEqual(expected); - }); -}); +import { readFile } from "fs/promises"; +import { Readme } from "@azure-tools/specs-shared/readme"; -describe("isWarning", () => { - test.each([ - { level: "error", expected: false }, - { level: "fatal", expected: false }, - { level: "warning", expected: true }, - { level: "information", expected: false }, - { level: "info", expected: false }, - ])(`isWarning($level) returns $expected`, ({ level, expected }) => { - expect(isWarning(level)).toEqual(expected); - }); -}); - -describe("getNewItems", () => { - test.concurrent("returns empty array when no before or after", ({ expect }) => { - const before: LintDiffViolation[] = []; - const after: LintDiffViolation[] = []; - - const result = getNewItems(before, after); - expect(result).toEqual([[], []]); - }); - - test.concurrent("a fatal error is always new", ({ expect }) => { - const before = [ - { - level: "fatal", - code: "SomeCode1", - message: "Some Message", - source: [ - { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: {}, - } as LintDiffViolation, - ]; - const after = [ - { - level: "fatal", - code: "SomeCode1", - message: "Some Message", - source: [ - { 
document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: {}, - } as LintDiffViolation, - ]; - - const result = getNewItems(before, after); - expect(result).toEqual([after, []]); - }); - - test.concurrent("returns all after items when no before", ({ expect }) => { - const before: LintDiffViolation[] = []; - const after = [ - { - level: "error", - code: "SomeCode1", - message: "Some Message", - source: [ - { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: {}, - } as LintDiffViolation, - { - level: "error", - code: "SomeCode2", - message: "Some Message", - source: [ - { document: "path/to/document2.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: {}, - } as LintDiffViolation, - ]; - - const result = getNewItems(before, after); - expect(result).toEqual([after, []]); - }); - - test.concurrent("returns only new errors", ({ expect }) => { - const before: LintDiffViolation[] = [ - { - level: "error", - code: "SomeCode1", - message: "Some Message", - source: [ - { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: { - jsonpath: ["some", "path"], - }, - } as LintDiffViolation, - ]; - const after = [ - { - level: "error", - code: "SomeCode1", - message: "Some Message", - source: [ - { document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: { - jsonpath: ["some", "path"], - }, - } as LintDiffViolation, - { - level: "error", - code: "SomeCode2", - message: "Some Message", - source: [ - { document: "path/to/document2.json", position: { line: 1, colomn: 1 } } as Source, - ], - details: { - jsonpath: ["some", "path"], - }, - } as LintDiffViolation, - ]; - - const result = getNewItems(before, after); - expect(result).toEqual([after.slice(1), before]); - }); +vi.mock("../src/util.js", async () => { + const original = await vi.importActual("../src/util.js"); + return { + ...original, + 
getDependencyVersion: vi.fn().mockResolvedValue("1.0.0"), + getPathToDependency: vi.fn().mockResolvedValue("path/to/dependency"), + }; }); describe("iconFor", () => { test.each([ + { input: "fatal", expected: ":x:" }, { input: "error", expected: ":x:" }, { input: "warning", expected: ":warning:" }, { input: "info", expected: ":warning:" }, @@ -261,7 +52,7 @@ describe("iconFor", () => { }); describe("getLine", () => { - test.concurrent("returns the line number", ({ expect }) => { + test("returns the line number", () => { const violation = { level: "fatal", code: "SomeCode1", @@ -274,7 +65,7 @@ describe("getLine", () => { expect(actual).toEqual(1); }); - test.concurrent("returns undefined when source is empty array", ({ expect }) => { + test("returns undefined when source is empty array", () => { const violation = { level: "fatal", code: "SomeCode1", @@ -287,7 +78,7 @@ describe("getLine", () => { expect(actual).toEqual(undefined); }); - test.concurrent("returns undefined when source position is empty", ({ expect }) => { + test("returns undefined when source position is empty", () => { const violation = { level: "fatal", code: "SomeCode1", @@ -300,7 +91,7 @@ describe("getLine", () => { expect(actual).toEqual(undefined); }); - test.concurrent("returns 0 when source position is 0", ({ expect }) => { + test("returns 0 when source position is 0", () => { const violation = { level: "fatal", code: "SomeCode1", @@ -315,7 +106,7 @@ describe("getLine", () => { }); describe("getFile", () => { - test.concurrent("returns the file name", ({ expect }) => { + test("returns the file name", () => { const violation = { level: "fatal", code: "SomeCode1", @@ -328,7 +119,7 @@ describe("getFile", () => { expect(actual).toEqual("path/to/document1.json"); }); - test.concurrent("returns empty string when source is empty array", ({ expect }) => { + test("returns empty string when source is empty array", () => { const violation = { level: "fatal", code: "SomeCode1", @@ -342,60 +133,36 @@ 
describe("getFile", () => { }); }); -describe("relativizePath", () => { - test.skipIf(isWindows) - .concurrent("relativizes path correctly", ({ expect }) => { - expect(relativizePath("/path/to/specification/service/file.json")).toEqual( - "/specification/service/file.json", - ); - }); - - test.concurrent("returns the same path if it doesn't include from", ({ expect }) => { - expect(relativizePath("/path/to/other/file.json")).toEqual("/path/to/other/file.json"); - }); - - test.concurrent("returns empty string when path is empty", ({ expect }) => { - expect(relativizePath("")).toEqual(""); - }); - - test.skipIf(isWindows) - .concurrent("uses the last instance of from", ({ expect }) => { - expect( - relativizePath("/path/to/specification/another/specification/service/file.json"), - ).toEqual("/specification/service/file.json"); - }); -}); - describe("getDocUrl", () => { - test.concurrent("returns a pointer to a kebab-cased markdown file", ({ expect }) => { + test("returns a pointer to a kebab-cased markdown file", () => { expect(getDocUrl("TestViolation")).toEqual( "https://github.com/Azure/azure-openapi-validator/blob/main/docs/test-violation.md", ); }); - test.concurrent("returns N/A when code is FATAL", ({ expect }) => { + test("returns N/A when code is FATAL", () => { expect(getDocUrl("FATAL")).toEqual("N/A"); }); }); describe("getFileLink", () => { - test.concurrent("does not include #L if line is null", ({ expect }) => { - expect(getFileLink("abc123", "file.json", null)).not.toContain("#L"); + test("does not include #L if line is null", () => { + expect(getFileLink("repo/path", "abc123", "file.json", null)).not.toContain("#L"); }); - test.concurrent("includes #L if line is not null", ({ expect }) => { - expect(getFileLink("abc123", "file.json", 1)).toContain("#L1"); + test("includes #L if line is not null", () => { + expect(getFileLink("repo/path", "abc123", "file.json", 1)).toContain("#L1"); }); - test.concurrent("returns the correct link with 
preceeding forward slash", ({ expect }) => { - expect(getFileLink("abc123", "/file.json", 1)).toEqual( - "https://github.com/Azure/azure-rest-api-specs/blob/abc123/file.json#L1", + test("returns the correct link with preceeding forward slash", () => { + expect(getFileLink("repo/path", "abc123", "/file.json", 1)).toEqual( + "https://github.com/repo/path/blob/abc123/file.json#L1", ); }); }); describe("getPathSegment", () => { - test.concurrent("returns trailing segments of a path", ({ expect }) => { + test("returns trailing segments of a path", () => { expect( getPathSegment( "/specification/recoveryservicessiterecovery/resource-manager/Microsoft.RecoveryServices/stable/2025-01-01/service.json", @@ -405,7 +172,7 @@ describe("getPathSegment", () => { }); describe("compareLintDiffViolations", () => { - test.concurrent("returns 0 if equal", ({ expect }) => { + test("returns 0 if equal", () => { const a: LintDiffViolation = { level: "error", code: "SomeCode1", @@ -419,7 +186,7 @@ describe("compareLintDiffViolations", () => { expect(actual).toEqual(0); }); - test.concurrent("returns 0 if a and b are equal and don't have lines", ({ expect }) => { + test("returns 0 if a and b are equal and don't have lines", () => { const a: LintDiffViolation = { level: "warning", code: "SomeCode1", @@ -436,7 +203,7 @@ describe("compareLintDiffViolations", () => { expect(actual).toEqual(0); }); - test.concurrent("returns -1 if a level is less than b's level", ({ expect }) => { + test("returns -1 if a level is less than b's level", () => { const a: LintDiffViolation = { level: "error", code: "SomeCode1", @@ -450,7 +217,7 @@ describe("compareLintDiffViolations", () => { expect(actual).toEqual(-1); }); - test.concurrent("returns 1 if a level is greater than b's level", ({ expect }) => { + test("returns 1 if a level is greater than b's level", () => { const a: LintDiffViolation = { level: "warning", code: "SomeCode1", @@ -464,7 +231,7 @@ 
describe("compareLintDiffViolations", () => { expect(actual).toEqual(1); }); - test.concurrent("returns -1 if a's file is less than b's file", ({ expect }) => { + test("returns -1 if a's file is less than b's file", () => { const a: LintDiffViolation = { level: "warning", code: "SomeCode1", @@ -481,7 +248,7 @@ describe("compareLintDiffViolations", () => { expect(actual).toEqual(-1); }); - test.concurrent("returns 1 if a's file is greater than b's file", ({ expect }) => { + test("returns 1 if a's file is greater than b's file", () => { const a: LintDiffViolation = { level: "warning", code: "SomeCode1", @@ -498,7 +265,7 @@ describe("compareLintDiffViolations", () => { expect(actual).toEqual(1); }); - test.concurrent("returns -1 if a's line is less than b's line", ({ expect }) => { + test("returns -1 if a's line is less than b's line", () => { const a: LintDiffViolation = { level: "warning", code: "SomeCode1", @@ -515,7 +282,7 @@ describe("compareLintDiffViolations", () => { expect(actual).toEqual(-1); }); - test.concurrent("returns 1 if a's line is greater than b's line", ({ expect }) => { + test("returns 1 if a's line is greater than b's line", () => { const a: LintDiffViolation = { level: "warning", code: "SomeCode1", @@ -531,4 +298,305 @@ describe("compareLintDiffViolations", () => { const actual = compareLintDiffViolations(a, b); expect(actual).toEqual(1); }); + + test("returns -1 if a's level is fatal and b's level is not", () => { + const a: LintDiffViolation = { + level: "fatal", + code: "SomeCode1", + message: "Some Message", + source: [{ document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source], + details: {}, + } as LintDiffViolation; + const b: LintDiffViolation = { + ...a, + level: "error", + }; + + const actual = compareLintDiffViolations(a, b); + expect(actual).toEqual(-1); + }); + + test("returns 1 if a's level is not fatal and b's level is", () => { + const a: LintDiffViolation = { + level: "error", + code: "SomeCode1", + 
message: "Some Message", + source: [{ document: "path/to/document1.json", position: { line: 1, colomn: 1 } } as Source], + details: {}, + } as LintDiffViolation; + const b: LintDiffViolation = { + ...a, + level: "fatal", + }; + + const actual = compareLintDiffViolations(a, b); + expect(actual).toEqual(1); + }); +}); + +describe("generateLintDiffReport", () => { + beforeEach(() => { + vol.reset(); + + // Seed current filesystem so that "." exists. + vol.mkdirSync(".", { recursive: true }); + }); + test.skipIf(isWindows())("fails if new violations include an error", async ({ expect }) => { + const afterViolation = { + extensionName: "@microsoft.azure/openapi-validator", + level: "error", + code: "SomeCode", + message: "Some Message", + source: [ + { + document: + "/home/test/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json", + position: { line: 1, colomn: 1 }, + } as Source, + ], + details: {}, + }; + + const beforeResult = { + error: null, + stdout: "", + stderr: "", + rootPath: "", + readme: new Readme("file1.md"), + tag: "", + } as AutorestRunResult; + const afterResult = { + error: null, + stdout: JSON.stringify(afterViolation), + stderr: "", + rootPath: "", + readme: new Readme("file1.md"), + tag: "", + } as AutorestRunResult; + + const runCorrelations = new Map([ + ["file1.md", { before: beforeResult, after: afterResult }], + ]); + + const outFile = "test-output.md"; + const actual = await generateLintDiffReport( + runCorrelations, + new Set([ + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json", + ]), + outFile, + "baseBranch", + "compareSha", + "repo/path", + ); + expect(actual).toBe(false); + expect(await readFile(outFile, { encoding: "utf-8" })).toMatchInlineSnapshot(` + "| Compared specs ([v1.0.0](https://www.npmjs.com/package/@microsoft.azure/openapi-validator/v/1.0.0)) | new version | base version | + | --- | --- | --- | + | default | 
[default](https://github.com/repo/path/blob/compareSha/file1.md) | [default](https://github.com/repo/path/blob/baseBranch/file1.md) | + + + **[must fix]The following errors/warnings are intorduced by current PR:** + + | Rule | Message | Related RPC [For API reviewers] | + | ---- | ------- | ------------------------------- | + | :x: [SomeCode](https://github.com/Azure/azure-openapi-validator/blob/main/docs/some-code.md) | Some Message
Location: [Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json#L1](https://github.com/repo/path/blob/compareSha/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json#L1) | | + + " + `); + }); + + test.skipIf(isWindows())("fails if new violation includes a fatal error", async ({ expect }) => { + const afterViolation = { + extensionName: "@microsoft.azure/openapi-validator", + level: "fatal", + code: "FATAL", + message: "A fatal error occurred", + source: [], + details: {}, + }; + + const beforeResult = { + error: null, + stdout: "", + stderr: "", + rootPath: "", + readme: new Readme("file1.md"), + tag: "", + } as AutorestRunResult; + const afterResult = { + error: null, + stdout: JSON.stringify(afterViolation), + stderr: "", + rootPath: "", + readme: new Readme("file1.md"), + tag: "", + } as AutorestRunResult; + + const runCorrelations = new Map([ + ["file1.md", { before: beforeResult, after: afterResult }], + ]); + + const outFile = "test-output-fatal.md"; + const actual = await generateLintDiffReport( + runCorrelations, + new Set([ + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json", + ]), + outFile, + "baseBranch", + "compareSha", + "repo/path", + ); + expect(actual).toBe(false); + expect(await readFile(outFile, { encoding: "utf-8" })).toMatchInlineSnapshot(` + "| Compared specs ([v1.0.0](https://www.npmjs.com/package/@microsoft.azure/openapi-validator/v/1.0.0)) | new version | base version | + | --- | --- | --- | + | default | [default](https://github.com/repo/path/blob/compareSha/file1.md) | [default](https://github.com/repo/path/blob/baseBranch/file1.md) | + + + **[must fix]The following errors/warnings are intorduced by current PR:** + + | Rule | Message | Related RPC [For API reviewers] | + | ---- | ------- | ------------------------------- | + | :x: FATAL | A fatal error occurred | | + + " + `); + }); + 
+ test.skipIf(isWindows())( + "passes if new violations do not include an error (warnings only)", + async ({ expect }) => { + const afterViolation = { + extensionName: "@microsoft.azure/openapi-validator", + level: "warning", + code: "SomeCode", + message: "Some Message", + source: [ + { + document: + "/home/test/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json", + position: { line: 1, colomn: 1 }, + } as Source, + ], + details: {}, + }; + + const beforeResult = { + error: null, + stdout: "", + stderr: "", + rootPath: "", + readme: new Readme("file1.md"), + tag: "", + } as AutorestRunResult; + const afterResult = { + error: null, + stdout: JSON.stringify(afterViolation), + stderr: "", + rootPath: "", + readme: new Readme("file1.md"), + tag: "", + } as AutorestRunResult; + + const runCorrelations = new Map([ + ["file1.md", { before: beforeResult, after: afterResult }], + ]); + + const outFile = "test-output.md"; + const actual = await generateLintDiffReport( + runCorrelations, + new Set([ + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json", + ]), + outFile, + "baseBranch", + "compareSha", + "repo/path", + ); + expect(actual).toBe(true); + + expect(await readFile(outFile, { encoding: "utf-8" })).toMatchInlineSnapshot(` + "| Compared specs ([v1.0.0](https://www.npmjs.com/package/@microsoft.azure/openapi-validator/v/1.0.0)) | new version | base version | + | --- | --- | --- | + | default | [default](https://github.com/repo/path/blob/compareSha/file1.md) | [default](https://github.com/repo/path/blob/baseBranch/file1.md) | + + + **[must fix]The following errors/warnings are intorduced by current PR:** + + | Rule | Message | Related RPC [For API reviewers] | + | ---- | ------- | ------------------------------- | + | :warning: [SomeCode](https://github.com/Azure/azure-openapi-validator/blob/main/docs/some-code.md) | Some 
Message
Location: [Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json#L1](https://github.com/repo/path/blob/compareSha/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json#L1) | | + + " + `); + }, + ); +}); + +describe("generateAutoRestErrorReport", () => { + beforeEach(() => { + vol.reset(); + + // Seed current filesystem so that "." exists. + vol.mkdirSync(".", { recursive: true }); + }); + + test("generates a report with errors", async () => { + const autoRestErrors = [ + { + result: { + readme: new Readme("dummy/rootPath/readme.md"), + tag: "tag1", + rootPath: "dummy/rootPath", + error: null, + stdout: "dummy stdout", + stderr: "dummy stderr", + }, + errors: [ + { level: "error", message: "Error message 1" } as AutoRestMessage, + { level: "fatal", message: "Fatal error message" } as AutoRestMessage, + ], + }, + { + result: { + readme: new Readme("dummy/rootPath/readme2.md"), + tag: "tag2", + rootPath: "dummy/rootPath", + error: null, + stdout: "dummy stdout", + stderr: "dummy stderr", + }, + errors: [{ level: "error", message: "Error message 2" } as AutoRestMessage], + }, + ]; + + const outFile = "autorest-error-report.md"; + await generateAutoRestErrorReport(autoRestErrors, outFile); + + const actual = await readFile(outFile, { encoding: "utf-8" }); + expect(actual).toMatchInlineSnapshot(` + "**AutoRest errors:** + + Readme: \`readme.md\` + Tag: \`tag1\` + Errors: + | Level | Message | + | ----- | ------- | + | :x: error | Error message 1 | + | :x: fatal | Fatal error message | + + + Readme: \`readme2.md\` + Tag: \`tag2\` + Errors: + | Level | Message | + | ----- | ------- | + | :x: error | Error message 2 | + + + " + `); + }); }); diff --git a/eng/tools/lint-diff/test/lint-diff.test.ts b/eng/tools/lint-diff/test/lint-diff.test.ts index 732a915d798c..8a11bd01418f 100644 --- a/eng/tools/lint-diff/test/lint-diff.test.ts +++ b/eng/tools/lint-diff/test/lint-diff.test.ts @@ -1,10 +1,18 @@ import { 
execa } from "execa"; -import { test, describe } from "vitest"; +import { test, describe, expect } from "vitest"; // TODO: Actual tests describe("e2e", () => { - test.concurrent("Executes", async ({ expect }) => { - const { exitCode } = await execa("npm", ["exec", "--no", "--", "lint-diff"], { reject: false }); - expect(exitCode).toBe(1); + test("Executes", async () => { + const output = await execa("npm", ["exec", "--no", "--", "lint-diff"], { reject: false }); + + try { + expect(output.exitCode).toBe(1); + } catch (error) { + console.log(`stdout: ${output.stdout}`); + console.log(`stderr: ${output.stderr}`); + console.error("Error:", error); + throw error; + } }); }); diff --git a/eng/tools/lint-diff/test/markdown-utils.test.ts b/eng/tools/lint-diff/test/markdown-utils.test.ts index 408b78d537a6..e133e72a563f 100644 --- a/eng/tools/lint-diff/test/markdown-utils.test.ts +++ b/eng/tools/lint-diff/test/markdown-utils.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, test, describe, vi, Mock } from "vitest"; +import { beforeEach, test, describe, vi, Mock, expect } from "vitest"; import { readFile } from "fs/promises"; import { join } from "node:path"; @@ -7,13 +7,11 @@ import axios from "axios"; import { deduplicateTags, getDocRawUrl, - getInputFiles, getDefaultTag, - getAllTags, getOpenapiType, - getTagsAndInputFiles, getRelatedArmRpcFromDoc, } from "../src/markdown-utils.js"; +import { Readme } from "@azure-tools/specs-shared/readme"; vi.mock("axios"); @@ -25,7 +23,7 @@ describe("deduplicateTags", () => { // Original comment describing deduplicateTags // if one tag 'A' 's input files contains all the input files of Tag 'B' , then B tag will be de-duplicated - test.concurrent("deduplicates tags", async ({ expect }) => { + test("deduplicates tags", () => { const tags = [ { tagName: "tag1", inputFiles: ["file1", "file2"] }, { tagName: "tag2", inputFiles: ["file1"] }, // Covered in tag1 @@ -38,30 +36,8 @@ describe("deduplicateTags", () => { }); }); 
-describe("getInputFiles", () => { - test.concurrent("returns input files for a readme content's tag", async ({ expect }) => { - const readmeContent = await readFile(join(__dirname, "fixtures/getInputFiles/readme.md"), { - encoding: "utf-8", - }); - - const inputFiles = await getInputFiles(readmeContent, "package-2022-12-01"); - - expect(inputFiles).toEqual(["Azure.Contoso.WidgetManager/stable/2022-12-01/widgets.json"]); - }); - - test.concurrent("returns empty array when no input files are found", async ({ expect }) => { - const readmeContent = await readFile(join(__dirname, "fixtures/getInputFiles/readme.md"), { - encoding: "utf-8", - }); - - const inputFiles = await getInputFiles(readmeContent, "TAG-NOT-FOUND"); - - expect(inputFiles).toEqual([]); - }); -}); - -describe("getDocRawUrl", async () => { - test.concurrent("returns the expected doc url", async ({ expect }) => { +describe("getDocRawUrl", () => { + test("returns the expected doc url", () => { const docUrl = getDocRawUrl("Post201Response"); expect(docUrl).toEqual( @@ -69,155 +45,117 @@ describe("getDocRawUrl", async () => { ); }); - test.concurrent("returns N/A on FATAL", async ({ expect }) => { + test("returns N/A on FATAL", () => { const docUrl = getDocRawUrl("FATAL"); expect(docUrl).toEqual("N/A"); }); }); -describe("getDefaultTag", async () => { - test.concurrent( - "returns default tag when there is a Basic Information header", - async ({ expect }) => { - const readmeContent = await readFile( - join(__dirname, "fixtures/getDefaultTag/hasBasicInformation.md"), - { - encoding: "utf-8", - }, - ); - 6; - - const defaultTag = getDefaultTag(readmeContent); - - expect(defaultTag).toEqual("package-2022-12-01"); - }, - ); +describe("getDefaultTag", () => { + test("returns default tag when there is a Basic Information header", async () => { + const defaultTag = await getDefaultTag( + new Readme(join(__dirname, "fixtures/getDefaultTag/hasBasicInformation.md")), + ); - test.concurrent( - "returns default tag 
when there is no Basic Information header", - async ({ expect }) => { - const readmeContent = await readFile( - join(__dirname, "fixtures/getDefaultTag/noBasicInformation.md"), - { - encoding: "utf-8", - }, - ); + expect(defaultTag).toEqual("package-2022-12-01"); + }); - const defaultTag = getDefaultTag(readmeContent); + test("returns default tag when there is no Basic Information header", async () => { + const defaultTag = await getDefaultTag( + new Readme(join(__dirname, "fixtures/getDefaultTag/noBasicInformation.md")), + ); - expect(defaultTag).toEqual("package-2023-07-preview"); - }, - ); + expect(defaultTag).toEqual("package-2023-07-preview"); + }); - test.concurrent("returns empty string when there is no default tag", async ({ expect }) => { - const readmeContent = await readFile( - join(__dirname, "fixtures/getDefaultTag/noDefaultTag.md"), - { - encoding: "utf-8", - }, + test("returns empty string when there is no default tag", async () => { + const defaultTag = await getDefaultTag( + new Readme(join(__dirname, "fixtures/getDefaultTag/noDefaultTag.md")), ); - const defaultTag = getDefaultTag(readmeContent); - expect(defaultTag).toEqual(""); }); -}); -describe("getAllTags", async () => { - test.concurrent("returns all tags", async ({ expect }) => { - const readmeContent = await readFile(join(__dirname, "fixtures/getAllTags/readme.md"), { - encoding: "utf-8", - }); - - const tags = getAllTags(readmeContent); - - expect(tags).toEqual([ - "package-preview-2024-01", - "package-preview-2023-08", - "package-preview-2023-07", - "package-preview-2023-04", - "package-preview-2023-01", - "package-2023-03", - "package-2021-08", - "package-preview-2021-08", - "package-preview-2021-07", - "package-2021-04-only", - "package-preview-2021-01", - "package-2019-06-preview", - "package-2019-06", - "package-2019-03", - "package-preview-2019-05", - "package-2018-05", - "package-2018-05-preview", - ]); - }); + test.each([ + { + description: "without Basic Information header", + 
readmeContent: `# Some header +This should be parsed as a string, not a Date object. +\`\`\`yaml +tag: 2025-01-01 +\`\`\` +`, + }, + { + description: "with Basic Information header", + readmeContent: `# Basic Information +This should be parsed as a string, not a Date object. +\`\`\`yaml +tag: 2025-01-01 +\`\`\` +`, + }, + ])( + "returns a string for default tag even when the tag is formatted like a date ($description)", + async ({ readmeContent }) => { + const defaultTag = await getDefaultTag(new Readme("readme", { content: readmeContent })); + + expect(defaultTag).not.toBeInstanceOf(Date); + expect(defaultTag).toBeTypeOf("string"); + expect(defaultTag).toEqual("2025-01-01"); + }, + ); }); -describe("getOpenapiType", async () => { - test.concurrent("openapi-type found and valid", async ({ expect }) => { +describe("getOpenapiType", () => { + test("openapi-type found and valid", async () => { const markdownFile = join(__dirname, "fixtures/getOpenapiType/type-found-and-valid.md"); - const openapiType = await getOpenapiType(markdownFile); + const readme = new Readme(markdownFile); + const openapiType = await getOpenapiType(readme); expect(openapiType).toEqual("data-plane"); }); - test.skipIf(isWindows) - .concurrent("openapi-type found but not valid", async ({ expect }) => { + test.skipIf(isWindows())("openapi-type found but not valid", async () => { const markdownFile = join( __dirname, "fixtures/getOpenapiType/specification/service1/data-plane/type-found-not-valid-readme.md", ); - const openapiType = await getOpenapiType(markdownFile); + const readme = new Readme(markdownFile); + const openapiType = await getOpenapiType(readme); expect(openapiType).toEqual("data-plane"); }); - test.skipIf(isWindows) - .concurrent("openapi-type not found, type arm", async ({ expect }) => { + test.skipIf(isWindows())("openapi-type not found, type arm", async () => { const markdownFile = join( __dirname, 
"fixtures/getOpenapiType/specification/service1/resource-manager/inferred-resource-manager-readme.md", ); - const openApiType = await getOpenapiType(markdownFile); - expect(openApiType).toEqual("arm"); + const readme = new Readme(markdownFile); + const openapiType = await getOpenapiType(readme); + expect(openapiType).toEqual("arm"); }); - test.skipIf(isWindows) - .concurrent("openapi-type not found, type data-plane", async ({ expect }) => { + test.skipIf(isWindows())("openapi-type not found, type data-plane", async () => { const markdownFile = join( __dirname, "fixtures/getOpenapiType/specification/service1/data-plane/inferred-data-plane-readme.md", ); - const openApiType = await getOpenapiType(markdownFile); - expect(openApiType).toEqual("data-plane"); + const readme = new Readme(markdownFile); + const openapiType = await getOpenapiType(readme); + expect(openapiType).toEqual("data-plane"); }); - test.concurrent("openapi-type not found, type default", async ({ expect }) => { + test("openapi-type not found, type default", async () => { const markdownFile = join(__dirname, "fixtures/getOpenapiType/default.md"); - const openApiType = await getOpenapiType(markdownFile); - expect(openApiType).toEqual("default"); - }); -}); - -describe("getTagsAndInputFiles", async () => { - test.concurrent("gets accurate input files for tag", async ({ expect }) => { - const readmeContent = await readFile( - join(__dirname, "fixtures/getTagsAndInputFiles/readme.md"), - { encoding: "utf-8" }, - ); - - const actual = await getTagsAndInputFiles(["package-preview-2019-05"], readmeContent); - expect(actual.length).toEqual(1); - expect(actual[0].tagName).toEqual("package-preview-2019-05"); - expect(actual[0].inputFiles).toEqual([ - "Microsoft.AlertsManagement/preview/2019-05-05-preview/ActionRules.json", - "Microsoft.AlertsManagement/preview/2019-05-05-preview/AlertsManagement.json", - "Microsoft.AlertsManagement/preview/2019-05-05-preview/SmartGroups.json", - ]); + const readme = new 
Readme(markdownFile); + const openapiType = await getOpenapiType(readme); + expect(openapiType).toEqual("default"); }); }); -describe("getRelatedArmRpcFromDoc", async () => { +describe("getRelatedArmRpcFromDoc", () => { // Tests are run sequentially to avoid concurrency issues with axios mocking beforeEach(() => { (axios.get as Mock).mockReset(); @@ -233,13 +171,13 @@ describe("getRelatedArmRpcFromDoc", async () => { }); } - test.sequential("returns empty array on FATAL", async ({ expect }) => { + test("returns empty array on FATAL", async () => { const rule = await getRelatedArmRpcFromDoc("FATAL"); expect(rule).toEqual([]); }); - test.sequential("returns a rule from the cache", async ({ expect }) => { + test("returns a rule from the cache", async () => { await mockResponseFile("lro-patch202.md"); await getRelatedArmRpcFromDoc("LroPatch202"); @@ -248,34 +186,34 @@ describe("getRelatedArmRpcFromDoc", async () => { expect((axios.get as Mock).mock.calls.length).toBe(1); }); - test.sequential("returns an empty array when no rules are found", async ({ expect }) => { + test("returns an empty array when no rules are found", async () => { await mockResponseFile("api-host.md"); const rules = await getRelatedArmRpcFromDoc("ApiHost"); expect(rules).toEqual([]); }); - test.sequential("returns rules when a list is found", async ({ expect }) => { + test("returns rules when a list is found", async () => { await mockResponseFile("system-data-definitions-common-types.md"); const rules = await getRelatedArmRpcFromDoc("SystemDataDefinitionsCommonTypes"); expect(rules).toEqual(["RPC-SystemData-V1-01", "RPC-SystemData-V1-02"]); }); - test.sequential("returns rules when a list with commas is found", async ({ expect }) => { + test("returns rules when a list with commas is found", async () => { await mockResponseFile("lro-patch202.md"); const rules = await getRelatedArmRpcFromDoc("LroPatch202"); expect(rules).toEqual(["RPC-Patch-V1-06", "RPC-Async-V1-08"]); }); - test.sequential("returns 
an empty set when the docUrl is not found", async ({ expect }) => { + test("returns an empty set when the docUrl is not found", async () => { (axios.get as Mock).mockRejectedValue(new Error("404 Not Found")); const rules = await getRelatedArmRpcFromDoc("DoesNotExist"); expect(rules).toEqual([]); }); - test.sequential("does not throw on axios errors", async ({ expect }) => { + test("does not throw on axios errors", () => { (axios.get as Mock).mockRejectedValue(new Error("404 Not Found")); expect(async () => await getRelatedArmRpcFromDoc("DoesNotExist")).not.toThrow(); diff --git a/eng/tools/lint-diff/test/processChanges.fs.test.ts b/eng/tools/lint-diff/test/processChanges.fs.test.ts index e746b3995572..f588d66984ae 100644 --- a/eng/tools/lint-diff/test/processChanges.fs.test.ts +++ b/eng/tools/lint-diff/test/processChanges.fs.test.ts @@ -1,108 +1,23 @@ -import { beforeEach, vi, test, describe } from "vitest"; +import { afterEach, vi, test, describe, expect } from "vitest"; import { vol } from "memfs"; -import { getAffectedReadmes, readFileList } from "../src/processChanges.js"; -import { afterEach } from "node:test"; -import { isWindows } from "./test-util.js"; +import { readFileList } from "../src/processChanges.js"; // These tests are in a separate module because fs mocking is difficult to undo -vi.mock("node:fs", () => { - const memfs = require("memfs"); - return { - ...memfs.fs, - }; -}); -vi.mock("node:fs/promises", () => { - const memfs = require("memfs"); +vi.mock("node:fs/promises", async () => { + const memfs = (await vi.importActual("memfs")) as typeof import("memfs"); return { ...memfs.fs.promises, }; }); -describe("getAffectedReadmes", () => { - beforeEach(() => { - vol.reset(); - }); - - test.skipIf(isWindows) - .concurrent("includes expected changed file", async ({ expect }) => { - const files = { - "./specification/a/readme.md": "a", - "./specification/b/readme.md": "b", - }; - vol.fromJSON(files, "."); - - const changedFiles = 
["specification/a/readme.md"]; - const affectedReadmes = await getAffectedReadmes(changedFiles, "."); - expect(affectedReadmes).toEqual(["specification/a/readme.md"]); - }); - - test.concurrent("excludes non-changed file outside of scope", async ({ expect }) => { - const files = { - "./specification/a/readme.md": "a", - "./specification/b/readme.md": "b", - }; - vol.fromJSON(files, "."); - - const changedFiles = ["specification/a/readme.md"]; - const affectedReadmes = await getAffectedReadmes(changedFiles, "."); - expect(affectedReadmes).not.toContain(["specification/b/readme.md"]); - }); - - test.skipIf(isWindows) - .concurrent("includes files up the heirarchy", async ({ expect }) => { - const files = { - "./specification/a/readme.md": "a", - "./specification/a/b/c/readme.md": "c", - }; - vol.fromJSON(files, "."); - - const changedFiles = ["specification/a/b/c/readme.md"]; - const affectedReadmes = await getAffectedReadmes(changedFiles, "."); - expect(affectedReadmes).toEqual(["specification/a/b/c/readme.md", "specification/a/readme.md"]); - }); - - test.skipIf(isWindows) - .concurrent( - "lists reademe files in folders with affected swagger files", - async ({ expect }) => { - const files = { - "./specification/service1/readme.md": "a", - "./specification/service1/b/c/swagger.json": "{}", - "./specification/service2/readme.md": "b", - "./specification/service2/swagger.json": "{}", - }; - vol.fromJSON(files, "."); - - const changedFiles = ["specification/service1/b/c/swagger.json"]; - const affectedReadmes = await getAffectedReadmes(changedFiles, "."); - expect(affectedReadmes).toEqual(["specification/service1/readme.md"]); - }, - ); - - test.skipIf(isWindows) - .concurrent("excludes files outside of specification/", async ({ expect }) => { - const files = { - "./repo-root/specification/a/readme.md": "a", - "./repo-root/specification/b/readme.md": "b", - "./repo-root/readme.md": "root", - "./repo-root/some.json": "{}", - }; - vol.fromJSON(files, "."); - - const 
changedFiles = ["some.json", "readme.md", "specification/a/readme.md"]; - const affectedReadmes = await getAffectedReadmes(changedFiles, "./repo-root"); - expect(affectedReadmes).toEqual(["specification/a/readme.md"]); - }); -}); - -describe("readFileList", async () => { +describe("readFileList", () => { afterEach(() => { vol.reset(); }); - test.concurrent("returns a list of items", async ({ expect }) => { + test("returns a list of items", async () => { // Using test1.txt because somehow another test affects the // value of test.txt in this context. const files = { @@ -114,7 +29,7 @@ describe("readFileList", async () => { expect(fileList).toEqual(["line1", "line2"]); }); - test.concurrent("returns an empty list if the file is empty", async ({ expect }) => { + test("returns an empty list if the file is empty", async () => { const files = { "./test.txt": "", }; diff --git a/eng/tools/lint-diff/test/processChanges.test.ts b/eng/tools/lint-diff/test/processChanges.test.ts index 48652d9db12b..294f4c3c5de1 100644 --- a/eng/tools/lint-diff/test/processChanges.test.ts +++ b/eng/tools/lint-diff/test/processChanges.test.ts @@ -1,169 +1,27 @@ -import { test, describe } from "vitest"; -import { join } from "node:path"; +import { test, describe, expect } from "vitest"; import { - getSwaggerDependenciesMap, - getAffectedSwaggers, getAffectedServices, getService, reconcileChangedFilesAndTags, + getChangedSwaggers, + buildState, } from "../src/processChanges.js"; +import { ReadmeAffectedTags } from "../src/lintdiff-types.js"; import { isWindows } from "./test-util.js"; - -describe("getSwaggerDependenciesMap", () => { - test.skipIf(isWindows) - .concurrent("empty set on no .json files", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - console.log("dirname", __dirname); - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/empty", - ); - - 
expect(dependencyMap.size).toEqual(0); - }); - - test.skipIf(isWindows) - .concurrent("d has no dependencies", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - expect(dependencyMap.has("specification/1/d.json")).toEqual(true); - expect(dependencyMap.get("specification/1/d.json")).toEqual(new Set()); - }); - - test.skipIf(isWindows) - .concurrent("a depends on b and c (and d transitively)", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - expect(dependencyMap.has("specification/1/a.json")).toEqual(true); - expect(dependencyMap.get("specification/1/a.json")).toEqual( - new Set([ - "specification/1/nesting/b.json", - "specification/1/c.json", - // d.json is a dependency of a.json through b.json - "specification/1/d.json", - ]), - ); - }); - - test.skipIf(isWindows) - .concurrent("b depends on c and d", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - expect(dependencyMap.has("specification/1/nesting/b.json")).toEqual(true); - expect(dependencyMap.get("specification/1/nesting/b.json")).toEqual( - new Set(["specification/1/c.json", "specification/1/d.json"]), - ); - }); -}); - -describe("getAffectedSwaggers", () => { - test.skipIf(isWindows) - .concurrent("a affects only a", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - const affectedSwaggers = 
getAffectedSwaggers(["specification/1/a.json"], dependencyMap); - - expect(affectedSwaggers).toEqual(["specification/1/a.json"]); - }); - - test.skipIf(isWindows) - .concurrent("b affects a and b", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - const affectedSwaggers = getAffectedSwaggers(["specification/1/nesting/b.json"], dependencyMap); - - expect(affectedSwaggers).toEqual(["specification/1/nesting/b.json", "specification/1/a.json"]); - }); - - test.skipIf(isWindows) - .concurrent("c affects a, b, c", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - const affectedSwaggers = getAffectedSwaggers(["specification/1/c.json"], dependencyMap); - - expect(affectedSwaggers).toEqual([ - "specification/1/c.json", - "specification/1/a.json", - "specification/1/nesting/b.json", - ]); - }); - - test.skipIf(isWindows) - .concurrent("d affects a, b, d", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - const affectedSwaggers = getAffectedSwaggers(["specification/1/d.json"], dependencyMap); - - expect(affectedSwaggers).toEqual([ - "specification/1/d.json", - "specification/1/a.json", - "specification/1/nesting/b.json", - ]); - }); - - test.skipIf(isWindows) - .concurrent("d, c affects a, b, c, d", async ({ expect }) => { - const __dirname = new URL(".", import.meta.url).pathname; - const dependencyMap = await getSwaggerDependenciesMap( - join(__dirname, "fixtures/getSwaggerDependenciesMap"), - "specification/1", - ); - - const affectedSwaggers = 
getAffectedSwaggers( - ["specification/1/d.json", "specification/1/c.json"], - dependencyMap, - ); - - expect(affectedSwaggers).toEqual([ - "specification/1/d.json", - "specification/1/c.json", - "specification/1/a.json", - "specification/1/nesting/b.json", - ]); - }); -}); +import { Readme } from "@azure-tools/specs-shared/readme"; +import { resolve } from "node:path"; describe("getAffectedServices", () => { - test.skipIf(isWindows) - .concurrent("returns single service with multiple files", async ({ expect }) => { + test.skipIf(isWindows())("returns single service with multiple files", async () => { const changedFiles = ["specification/service1/file1.json", "specification/service1/file2.json"]; const affectedServices = await getAffectedServices(changedFiles); expect(affectedServices).toEqual(new Set(["specification/service1"])); }); - test.skipIf(isWindows) - .concurrent("returns multiple services", async ({ expect }) => { + test.skipIf(isWindows())("returns multiple services", async () => { const changedFiles = [ "specification/service1/file1.json", "specification/service1/file2.json", @@ -178,18 +36,16 @@ describe("getAffectedServices", () => { }); describe("getService", () => { - test.skipIf(isWindows) - .concurrent("returns service name from file path", async ({ expect }) => { + test.skipIf(isWindows())("returns service name from file path", async () => { const filePath = "specification/service1/file1.json"; const serviceName = await getService(filePath); expect(serviceName).toEqual("specification/service1"); }); - test.skipIf(isWindows) - .concurrent( + test.skipIf(isWindows())( "returns service name from file path with leading separator", - async ({ expect }) => { + async () => { const filePath = "/specification/service1/file1.json"; const serviceName = await getService(filePath); @@ -197,46 +53,209 @@ describe("getService", () => { }, ); - test.concurrent( - "throws when file path does not contain enough pieces to assemble a service name", - async ({ 
expect }) => { - const filePath = "file1.json"; - await expect(() => getService(filePath)).toThrow("Could not find service for file path"); - }, - ); + test("throws when file path does not contain enough pieces to assemble a service name", async () => { + const filePath = "file1.json"; + await expect(() => getService(filePath)).toThrow("Could not find service for file path"); + }); }); describe("reconcileChangedFilesAndTags", () => { - test.concurrent( - "if a tag is deleted in after and exists in before, remove the tag from before", - ({ expect }) => { - const before = new Map([["specification/1/readme.md", ["tag1", "tag2"]]]); - const after = new Map([["specification/1/readme.md", ["tag1"]]]); - - const [beforeFinal, afterFinal] = reconcileChangedFilesAndTags(before, after); - expect(beforeFinal).toEqual( - new Map([["specification/1/readme.md", ["tag1"]]]), - ); - expect(afterFinal).toEqual(after); - }, - ); + test("if a tag is deleted in after and exists in before, remove the tag from before", () => { + const before = new Map([ + [ + "specification/1/readme.md", + { + readme: new Readme("specification/1/readme.md"), + changedTags: new Set(["tag1", "tag2"]), + }, + ], + ]); + const after = new Map([ + [ + "specification/1/readme.md", + { + readme: new Readme("specification/1/readme.md"), + changedTags: new Set(["tag1"]), + }, + ], + ]); + + const [beforeFinal, afterFinal] = reconcileChangedFilesAndTags(before, after); + expect(beforeFinal).toEqual( + new Map([ + [ + "specification/1/readme.md", + expect.objectContaining({ + changedTags: new Set(["tag1"]), + }), + ], + ]), + ); + expect(afterFinal).toEqual(after); + }); - test.concurrent("does not change if there is no change", ({ expect }) => { - const before = new Map([["specification/1/readme.md", ["tag1", "tag2"]]]); - const after = new Map([["specification/1/readme.md", ["tag1", "tag2"]]]); + test("does not change if there is no change", () => { + const before = new Map([ + [ + "specification/1/readme.md", + 
{ + readme: new Readme("specification/1/readme.md"), + changedTags: new Set(["tag1", "tag2"]), + }, + ], + ]); + const after = new Map([ + [ + "specification/1/readme.md", + { + readme: new Readme("specification/1/readme.md"), + changedTags: new Set(["tag1", "tag2"]), + }, + ], + ]); const [beforeFinal, afterFinal] = reconcileChangedFilesAndTags(before, after); expect(beforeFinal).toEqual(before); expect(afterFinal).toEqual(after); }); - // TODO: Test this and ensure the behavior matches - test.concurrent("keeps a specification in before if it is deleted in after", ({ expect }) => { - const before = new Map([["specification/1/readme.md", ["tag1", "tag2"]]]); - const after = new Map(); + test("keeps a specification in before if it is deleted in after", () => { + const before = new Map([ + [ + "specification/1/readme.md", + { + readme: new Readme("specification/1/readme.md"), + changedTags: new Set(["tag1", "tag2"]), + }, + ], + ]); + const after = new Map(); const [beforeFinal, afterFinal] = reconcileChangedFilesAndTags(before, after); - expect(beforeFinal).toEqual(beforeFinal); + expect(beforeFinal).toEqual(before); expect(afterFinal).toEqual(after); }); }); + +describe("getChangedSwaggers", () => { + test("returns an empty set if no swaggers are changed", async () => { + expect( + getChangedSwaggers( + "test/fixtures/getChangedSwaggers/before", + "test/fixtures/getChangedSwaggers/after", + new Set(), + ), + ).resolves.toEqual(new Set()); + }); + + test("excludes swaggers that are not changed", async () => { + const swaggers = await getChangedSwaggers( + "test/fixtures/getChangedSwaggers/before/", + "test/fixtures/getChangedSwaggers/after/", + new Set(["specification/service1/file1.json"]), + ); + expect(swaggers).toEqual(new Set()); + }); + + test("includes swaggers that don't exist in before", async () => { + const swaggers = await getChangedSwaggers( + "test/fixtures/getChangedSwaggers/before/", + "test/fixtures/getChangedSwaggers/after/", + new 
Set(["specification/service1/new-file.json"]), + ); + expect(swaggers).toEqual(new Set(["specification/service1/new-file.json"])); + }); + + test("includes swagger that has been changed", async () => { + const swaggers = await getChangedSwaggers( + "test/fixtures/getChangedSwaggers/before/", + "test/fixtures/getChangedSwaggers/after/", + new Set(["specification/service1/different.json"]), + ); + expect(swaggers).toEqual(new Set(["specification/service1/different.json"])); + }); + + test("includes swaggers that have a relevant changed dependency", async () => { + const swaggers = await getChangedSwaggers( + "test/fixtures/getChangedSwaggers/before/", + "test/fixtures/getChangedSwaggers/after/", + new Set([ + "specification/service1/with-dependency.json", + "specification/service1/changed-dependency.json", + ]), + ); + expect(swaggers).toEqual( + new Set([ + "specification/service1/with-dependency.json", + "specification/service1/changed-dependency.json", + ]), + ); + }); +}); + +describe("buildState", () => { + test.skipIf(isWindows())("returns output for a swagger edited in place", async () => { + const actual = await buildState( + ["specification/edit-in-place/data-plane/swagger.json"], + "test/fixtures/buildState/", + ); + + expect(actual).toMatchInlineSnapshot(` + [ + Map { + "specification/edit-in-place/readme.md" => { + "changedTags": Set { + "package-2022-12-01", + }, + "readme": Readme {}, + }, + }, + [ + "specification/edit-in-place/data-plane/swagger.json", + ], + ] + `); + }); + + test.skipIf(isWindows())("returns output for an edited readme", async () => { + const actual = await buildState( + ["specification/edit-in-place/readme.md"], + "test/fixtures/buildState/", + ); + + expect(actual).toMatchObject([ + new Map([ + [ + "specification/edit-in-place/readme.md", + { + changedTags: new Set(), + readme: expect.any(Readme), + }, + ], + ]), + [], + ]); + + expect(actual[0].get("specification/edit-in-place/readme.md")!.readme.path).toEqual( + 
resolve("test/fixtures/buildState/", "specification/edit-in-place/readme.md"), + ); + }); + + test("does not throw if a file is missing", async () => { + expect(() => + buildState( + ["specification/edit-in-place/data-plane/does-not-exist.json"], + "test/fixtures/buildState/", + ), + ).not.toThrow(); + }); + + test.skipIf(isWindows())("does not include readme files that has no input-file:", async () => { + const actual = await buildState( + ["specification/no-input-file/readme.md"], + "test/fixtures/buildState/", + ); + + expect(actual).toEqual([new Map(), []]); + }); +}); diff --git a/eng/tools/lint-diff/test/runChecks.test.ts b/eng/tools/lint-diff/test/runChecks.test.ts index 60c1bedf8384..c46ae153ddfc 100644 --- a/eng/tools/lint-diff/test/runChecks.test.ts +++ b/eng/tools/lint-diff/test/runChecks.test.ts @@ -1,14 +1,132 @@ -import { test, describe, expect } from "vitest"; +import { vi, test, describe, beforeEach, expect, Mock } from "vitest"; -import { executeCommand } from "../src/runChecks.js"; +vi.mock(import("@azure-tools/specs-shared/exec"), async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + execNpmExec: vi.fn(), + }; +}); + +vi.mock(import("../src/util.js"), async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + getPathToDependency: vi.fn(), + }; +}); -describe("executeCommand", () => { - test("executes and returns result", async () => { - const command = "echo 'hello world'"; +vi.mock(import("../src/markdown-utils.js"), async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + getOpenapiType: vi.fn(), + }; +}); + +import { runChecks, getAutorestErrors } from "../src/runChecks.js"; +import { AutorestRunResult } from "../src/lintdiff-types.js"; +import { execNpmExec } from "@azure-tools/specs-shared/exec"; +import { ReadmeAffectedTags } from "../src/lintdiff-types.js"; +import { Readme } from "@azure-tools/specs-shared/readme"; + 
+describe("runChecks", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); - const { error, stdout } = await executeCommand(command); + test("sets outputs properly on tag", async () => { + (execNpmExec as Mock).mockResolvedValue({ stdout: "out", stderr: "err" }); + const runList = new Map([ + ["readme.md", { readme: new Readme(""), changedTags: new Set(["tag1"]) }], + ]); + + const actual = await runChecks("root", runList); + expect(actual).toHaveLength(1); + expect(actual[0].error).toBeNull(); + expect(actual[0].stdout).toBe("out"); + expect(actual[0].stderr).toBe("err"); + + expect(execNpmExec).toHaveBeenCalledWith( + expect.arrayContaining([expect.stringContaining("--tag=tag1")]), + expect.anything(), + ); + }); + + test("coalesces null tag when no tags specified", async () => { + (execNpmExec as Mock).mockResolvedValue({ stdout: "", stderr: "" }); + const runList = new Map([ + ["readme.md", { readme: new Readme(""), changedTags: new Set() }], + ]); + + const actual = await runChecks("root", runList); + expect(actual).toHaveLength(1); + expect(execNpmExec).toHaveBeenCalledWith( + expect.not.arrayContaining([expect.stringContaining("--tag")]), + expect.anything(), + ); + }); + + test("error path populates error, stdout, stderr", async () => { + // Consturct an error object that will return true when passed to isExecError + const err = new Error(); + (err as any).stdout = "s"; + (err as any).stderr = "e"; + (err as any).code = 1; + + (execNpmExec as Mock).mockRejectedValue(err); + const runList = new Map([ + ["readme.md", { readme: new Readme(""), changedTags: new Set(["tag1", "tag2"]) }], + ]); + + const actual = await runChecks("root", runList); + expect(actual).toHaveLength(2); + actual.forEach((r) => { + expect(r.error).toBe(err); + expect(r.stdout).toBe("s"); + expect(r.stderr).toBe("e"); + }); + }); + + test("error path throws an error that isn't an ExecError", async () => { + (execNpmExec as Mock).mockRejectedValue({ + message: "some error for which 
isExecError returns false", + }); + const runList = new Map([ + ["readme.md", { readme: new Readme(""), changedTags: new Set(["tag1", "tag2"]) }], + ]); + expect(runChecks("root", runList)).rejects.toThrow(); + }); +}); + +describe("getAutorestErrors", () => { + test("filters only error and fatal levels", () => { + const lines = `{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"warning","message":"Use the latest version v6 of types.json.","code":"LatestVersionOfCommonTypesMustBeUsed","details":{"jsonpath":["definitions","SettingsResourceUpdate","allOf","0","$ref"],"validationCategory":"","providerNamespace":false,"resourceType":false,"rpcGuidelineCode":"","range":{"start":{"line":444,"column":18},"end":{"line":444,"column":111}}},"source":[{"document":"file:///mnt/vss/_work/1/azure-rest-api-specs-pr/specification/portalservices/resource-manager/Microsoft.PortalServices/settings/preview/2025-04-01-preview/settings.json","position":{"line":444,"column":11}}]} +{"pluginName":"spectral","extensionName":"@microsoft.azure/openapi-validator","level":"information","message":"spectralPluginFunc: Return"} +{"level":"fatal","message":"Process() cancelled due to failure "} +{"level":"error","message":"!Error: There are multiple operations defined for \\n 'get: /providers/Microsoft.PortalServices/operations'.\\n\\n You are probably trying to use an input with multiple API versions with an autorest V2 generator, and that will not work. "} +{"level":"error","message":"stack: Error: There are multiple operations defined for \\n 'get: /providers/Microsoft.PortalServices/operations'.\\n\\n You are probably trying to use an input with multiple API versions with an autorest V2 generator, and that will not work. 
\\n at NewComposer.visitPath (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:4371:23)\\n at NewComposer.visitPaths (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:4357:22)\\n at NewComposer.process (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:4305:26)\\n at NewComposer.runProcess (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:16339:28)\\n at NewComposer.getOutput (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:16259:9)\\n at compose (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:4624:56)\\n at ScheduleNode (/home/cloudtest/.autorest/@autorest_core@3.10.4/node_modules/@autorest/core/dist/src_lib_autorest-core_ts.js:1351:29)"} +{"level":"error","message":"Autorest completed with an error. 
If you think the error message is unclear, or is a bug, please declare an issues at https://github.com/Azure/autorest/issues with the error message you are seeing."}`; + + const runResult = { stdout: lines, stderr: "" } as any; + + const errors = getAutorestErrors(runResult); + expect(errors).toEqual( + expect.arrayContaining([ + expect.objectContaining({ level: "fatal", message: "Process() cancelled due to failure " }), + expect.objectContaining({ level: "error" }), + ]), + ); + + expect(errors).not.toEqual( + expect.arrayContaining([expect.objectContaining({ level: "information" })]), + ); + expect(errors).not.toEqual( + expect.arrayContaining([expect.objectContaining({ level: "warning" })]), + ); + }); - expect(error).toBeNull(); - expect(stdout).toMatch("hello world"); + test("returns empty when none", () => { + expect(getAutorestErrors({ stdout: "", stderr: "" } as AutorestRunResult)).toEqual([]); }); }); diff --git a/eng/tools/lint-diff/test/test-util.ts b/eng/tools/lint-diff/test/test-util.ts index 885288066313..e2a0313f3371 100644 --- a/eng/tools/lint-diff/test/test-util.ts +++ b/eng/tools/lint-diff/test/test-util.ts @@ -1,3 +1,3 @@ export function isWindows(): boolean { return process.platform === "win32"; -} \ No newline at end of file +} diff --git a/eng/tools/lint-diff/test/util.test.ts b/eng/tools/lint-diff/test/util.test.ts index 81ff71b25abc..80e6fde73fff 100644 --- a/eng/tools/lint-diff/test/util.test.ts +++ b/eng/tools/lint-diff/test/util.test.ts @@ -1,6 +1,6 @@ -import { test, describe, vi } from "vitest"; +import { test, describe, vi, expect } from "vitest"; import { vol } from "memfs"; -import { pathExists } from "../src/util.js"; +import { pathExists, isFailure, isWarning } from "../src/util.js"; import { beforeEach } from "node:test"; vi.mock("fs/promises", () => { @@ -15,7 +15,7 @@ describe("pathExists", () => { vol.reset(); }); - test.concurrent("returns true for existing path", async ({ expect }) => { + test("returns true for 
existing path", async () => { const files = { "./file-exists": "a", }; @@ -26,7 +26,7 @@ describe("pathExists", () => { expect(exists).toEqual(true); }); - test.concurrent("returns false for non-existing path", async ({ expect }) => { + test("returns false for non-existing path", async () => { const files = { "./file-exists": "a", }; @@ -37,3 +37,28 @@ describe("pathExists", () => { expect(exists).toEqual(false); }); }); + +describe("isFailure", () => { + // Data driven test + test.each([ + { level: "error", expected: true }, + { level: "fatal", expected: true }, + { level: "warning", expected: false }, + { level: "information", expected: false }, + { level: "info", expected: false }, + ])(`isFailure($level) returns $expected`, ({ level, expected }) => { + expect(isFailure(level)).toEqual(expected); + }); +}); + +describe("isWarning", () => { + test.each([ + { level: "error", expected: false }, + { level: "fatal", expected: false }, + { level: "warning", expected: true }, + { level: "information", expected: false }, + { level: "info", expected: false }, + ])(`isWarning($level) returns $expected`, ({ level, expected }) => { + expect(isWarning(level)).toEqual(expected); + }); +}); diff --git a/eng/tools/lint-diff/tsconfig.json b/eng/tools/lint-diff/tsconfig.json index a4185b87a411..585f66d32d76 100644 --- a/eng/tools/lint-diff/tsconfig.json +++ b/eng/tools/lint-diff/tsconfig.json @@ -2,21 +2,10 @@ "extends": "../tsconfig.json", "compilerOptions": { "outDir": "./dist", - "rootDir": "../../..", + "rootDir": ".", "noImplicitReturns": true, - "allowJs": true + "allowJs": true, }, - "include": [ - "../../../.github/src/changed-files.js", - "../../../.github/src/exec.js", - "../../../.github/src/git.js", - "../../../.github/src/types.js", - "**/src/*" - ], - "exclude": [ - "cmd", - "node_modules", - "dist", - "test" - ] + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], + "exclude": ["*.test.ts"], } diff --git a/eng/tools/lint-diff/vite.config.ts 
b/eng/tools/lint-diff/vite.config.ts deleted file mode 100644 index 5c76c1ed553b..000000000000 --- a/eng/tools/lint-diff/vite.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { defineConfig } from "vite"; -import { configDefaults } from "vitest/config"; - -export default defineConfig({ - test: { - ...configDefaults, - testTimeout: 20000, - coverage: { - provider: "v8", - reporter: ["text", "json", "html"], - }, - }, -}); diff --git a/eng/tools/oav-runner/README.md b/eng/tools/oav-runner/README.md new file mode 100644 index 000000000000..88f0a3f0e70a --- /dev/null +++ b/eng/tools/oav-runner/README.md @@ -0,0 +1,12 @@ +# `oav-runner` + +This is a simple wrapper script around the `oav` tool. It utilizes shared js code code modules from `.github/shared` to +determine a list of swagger specs that should be processed, processes them, then outputs necessary detailed run +information. + +## Invocation shortcuts + +``` +cd +npm ci && npm exec --no -- oav-runner <"specs"/"examples"> +``` diff --git a/eng/tools/oav-runner/cmd/oav-runner.js b/eng/tools/oav-runner/cmd/oav-runner.js new file mode 100755 index 000000000000..abdd7016b6cf --- /dev/null +++ b/eng/tools/oav-runner/cmd/oav-runner.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +import { main } from "../dist/src/cli.js"; + +await main(); diff --git a/eng/tools/oav-runner/package.json b/eng/tools/oav-runner/package.json new file mode 100644 index 000000000000..0f9a07d527da --- /dev/null +++ b/eng/tools/oav-runner/package.json @@ -0,0 +1,32 @@ +{ + "name": "@azure-tools/oav-runner", + "private": true, + "type": "module", + "main": "dist/src/main.js", + "bin": { + "oav-runner": "cmd/oav-runner.js" + }, + "scripts": { + "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", + "test": "vitest", + "test:ci": "vitest run --coverage --reporter=verbose" + }, + "dependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", + "js-yaml": "^4.1.0", + "oav": "^3.5.1", + "simple-git": "^3.27.0" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "prettier": "~3.5.3", + "typescript": "~5.8.2", + "vitest": "^3.0.7" + }, + "engines": { + "node": ">=20.0.0" + } +} diff --git a/eng/tools/oav-runner/src/cli.ts b/eng/tools/oav-runner/src/cli.ts new file mode 100644 index 000000000000..a8a3f74e59a7 --- /dev/null +++ b/eng/tools/oav-runner/src/cli.ts @@ -0,0 +1,111 @@ +#!/usr/bin/env node + +import { checkSpecs, checkExamples } from "./runner.js"; +import { + outputAnnotatedErrors, + outputErrorSummary, + outputSuccessSummary, + ReportableOavError, +} from "./formatting.js"; + +import { resolve } from "path"; +import { parseArgs, ParseArgsConfig } from "node:util"; +import fs from "node:fs/promises"; +import { simpleGit } from "simple-git"; + +export async function getRootFolder(inputPath: string): Promise { + try { + const gitRoot = await simpleGit(inputPath).revparse("--show-toplevel"); + return resolve(gitRoot.trim()); + } catch (error) { + console.error( + `Error: Unable to determine the root folder of the git repository.`, + `Please ensure you are running this command within a git repository OR providing a targeted directory that is within a git repo.`, + ); + process.exit(1); + } +} + +export async function main() { + const config: ParseArgsConfig = { + options: { + targetDirectory: { + type: "string", + short: "d", + multiple: false, + default: process.cwd(), + }, + fileList: { + type: "string", + short: "f", + multiple: false, + default: undefined, + }, + }, + allowPositionals: true, + }; + + const { values: opts, positionals } = parseArgs(config); + // this option has a default value of process.cwd(), so we can assume it is always defined + // just need to 
resolve that here to make ts aware of it + const targetDirectory = opts.targetDirectory as string; + + const resolvedGitRoot = await getRootFolder(targetDirectory); + + let fileList: string[] | undefined = undefined; + if (opts.fileList !== undefined) { + const fileListPath = resolve(opts.fileList as string); + try { + const fileContent = await fs.readFile(fileListPath, { encoding: "utf-8" }); + fileList = fileContent + .split("\n") + .map((line) => line.trim()) + .filter((line) => line.length > 0); + console.log(`Loaded ${fileList.length} files from ${opts.fileList}`); + } catch (error) { + console.error( + `Error reading file list from ${opts.fileList}: ${error instanceof Error ? error.message : String(error)}`, + ); + console.error("User provided file list that is not found."); + console.error( + "Please ensure the file exists and is readable, or do not provide the option 'fileList'", + ); + process.exit(1); + } + } + + // first positional is runType + const [runType] = positionals; + + if (runType !== "specs" && runType !== "examples") { + console.error("Error: must be either 'specs' or 'examples'."); + process.exit(1); + } + + console.log(`Running oav-runner against ${runType} within ${resolvedGitRoot}.`); + + let exitCode = 0; + let scannedSwaggerFiles: string[] = []; + let errorList: ReportableOavError[] = []; + let reportName = ""; + + if (runType === "specs") { + [exitCode, scannedSwaggerFiles, errorList] = await checkSpecs(resolvedGitRoot, fileList); + reportName = "Swagger SemanticValidation"; + } else if (runType === "examples") { + [exitCode, scannedSwaggerFiles, errorList] = await checkExamples(resolvedGitRoot, fileList); + reportName = "Swagger ModelValidation"; + } + + if (errorList.length > 0) { + // print the errors so that they will annotate the files on github UI interface + outputAnnotatedErrors(errorList); + + // print the errors in a summary report that we can later output to + outputErrorSummary(errorList, reportName); + } else { + 
outputSuccessSummary(scannedSwaggerFiles, reportName); + } + + process.exit(exitCode); +} diff --git a/eng/tools/oav-runner/src/formatting.ts b/eng/tools/oav-runner/src/formatting.ts new file mode 100644 index 000000000000..49816944a3f4 --- /dev/null +++ b/eng/tools/oav-runner/src/formatting.ts @@ -0,0 +1,93 @@ +import { annotateFileError, setSummary } from "@azure-tools/specs-shared/error-reporting"; + +export interface ReportableOavError { + message: string; + file: string; + errorCode?: string; + line?: number; + column?: number; +} + +export function outputAnnotatedErrors(errors: ReportableOavError[]) { + errors.forEach((error) => { + let msg: string = `${error.message}`; + + if (error.errorCode) { + msg = `${error.errorCode}: ${msg}`; + } + + // we only attempt an in-place annotation if we have the line and column associated with the error + // otherwise we just depend upon the summary report to show the error + if (error.line && error.column) { + annotateFileError(error.file, msg, error.line, error.column); + } + }); +} + +export function outputSuccessSummary(swaggerFiles: string[], reportName: string) { + let builtLines: string[] = []; + + builtLines.push(`## All specifications passed ${reportName}`); + builtLines.push("| File | Status |"); + builtLines.push("| --- | --- |"); + for (const swaggerFile of swaggerFiles) { + builtLines.push(`| ${swaggerFile} | ✅ |`); + } + + const summaryResult = builtLines.join("\n"); + + if (process.env.GITHUB_STEP_SUMMARY) { + setSummary(summaryResult); + } else { + console.log(summaryResult); + } +} + +export function outputErrorSummary(errors: ReportableOavError[], reportName: string) { + let builtLines: string[] = []; + let checkName: string = ""; + + builtLines.push(`## Error Summary - ${reportName}`); + + // just mapping the report names we want to migrate to the old names here, so we don't have to pull it through everywhere when we want to change it + if (reportName === "Swagger SemanticValidation") { + checkName = 
"validate-spec"; + } else if (reportName === "Swagger ModelValidation") { + checkName = "validate-example"; + } + + builtLines.push(`⚠️ This check is testing a new version of '${reportName}'. ⚠️`); + builtLines.push( + "Failures are expected, and should be completely ignored by spec authors and reviewers.", + ); + builtLines.push(`Meaningful results for this PR are in required check '${reportName}'.`); + builtLines.push("| File | Line#Column | Code | Message |"); + builtLines.push("| --- | --- | --- | --- |"); + + // sort the errors by file name then by error code + errors.sort((a, b) => { + const nameCompare = a.file.localeCompare(b.file); + if (nameCompare !== 0) { + return nameCompare; + } + return (a.errorCode || "").localeCompare(b.errorCode || ""); + }); + + errors.forEach((error) => { + const fmtLineCol = error.line && error.column ? `${error.line}#${error.column}` : "N/A"; + builtLines.push(`| ${error.file} | ${fmtLineCol} | ${error.errorCode} | ${error.message} |`); + }); + + builtLines.push("\n"); + builtLines.push( + `> [!IMPORTANT]\n> Repro any individual file's worth of errors by invoking \`npx oav ${checkName} \` from the root of the rest-api-specs repo.`, + ); + + const summaryResult = builtLines.join("\n"); + + if (process.env.GITHUB_STEP_SUMMARY) { + setSummary(summaryResult); + } else { + console.log(summaryResult); + } +} diff --git a/eng/tools/oav-runner/src/runner.ts b/eng/tools/oav-runner/src/runner.ts new file mode 100644 index 000000000000..a9c4e191a57f --- /dev/null +++ b/eng/tools/oav-runner/src/runner.ts @@ -0,0 +1,213 @@ +#!/usr/bin/env node + +import * as oav from "oav"; +import * as path from "path"; +import * as fs from "fs"; + +import { Swagger } from "@azure-tools/specs-shared/swagger"; +import { includesFolder } from "@azure-tools/specs-shared/path"; +import { getChangedFiles } from "@azure-tools/specs-shared/changed-files"; //getChangedFiles, +import { ReportableOavError } from "./formatting.js"; + +export async function 
preCheckFiltering( + rootDirectory: string, + fileList?: string[], +): Promise { + const changedFiles = fileList ?? (await getChangedFiles({ cwd: rootDirectory })); + + const swaggerFiles = await processFilesToSpecificationList(rootDirectory, changedFiles); + + console.log("oav-runner is checking the following specification rooted files:"); + swaggerFiles.forEach((file) => console.log(`- ${file}`)); + + return swaggerFiles; +} + +export async function checkExamples( + rootDirectory: string, + fileList?: string[], +): Promise<[number, string[], ReportableOavError[]]> { + let errors: ReportableOavError[] = []; + + const swaggerFiles = await preCheckFiltering(rootDirectory, fileList); + + for (const swaggerFile of swaggerFiles) { + try { + const errorResults = await oav.validateExamples(swaggerFile, undefined); + + for (const error of errorResults || []) { + errors.push({ + message: error.message, + errorCode: error.code, + file: error.exampleUrl, + line: error.examplePosition?.line, + column: error.examplePosition?.column, + } as ReportableOavError); + } + } catch (e) { + if (e instanceof Error) { + console.log(`Error validating examples for ${swaggerFile}: ${e.message}`); + errors.push({ + message: e.message, + file: swaggerFile, + } as ReportableOavError); + } else { + console.log(`Error validating examples for ${swaggerFile}: ${e}`); + errors.push({ + message: `Unhandled error validating ${swaggerFile}: ${e}`, + file: swaggerFile, + } as ReportableOavError); + } + } + } + + if (errors.length > 0) { + return [1, swaggerFiles, errors]; + } + return [0, swaggerFiles, []]; +} + +export async function checkSpecs( + rootDirectory: string, + fileList?: string[], +): Promise<[number, string[], ReportableOavError[]]> { + let errors: ReportableOavError[] = []; + + const swaggerFiles = await preCheckFiltering(rootDirectory, fileList); + + for (const swaggerFile of swaggerFiles) { + try { + const errorResults = await oav.validateSpec(swaggerFile, undefined); + if 
(errorResults.validateSpec && errorResults.validateSpec.errors) { + for (const error of errorResults.validateSpec.errors) { + errors.push({ + message: error.message, + errorCode: error.code, + file: swaggerFile, + line: error.position?.line, + column: error.position?.column, + } as ReportableOavError); + } + } + } catch (e) { + if (e instanceof Error) { + console.log(`Error validating ${swaggerFile}: ${e.message}`); + errors.push({ + message: e.message, + file: swaggerFile, + } as ReportableOavError); + } else { + console.log(`Error validating ${swaggerFile}: ${e}`); + errors.push({ + message: `Unhandled error validating ${swaggerFile}: ${e}`, + file: swaggerFile, + } as ReportableOavError); + } + } + } + + if (errors.length > 0) { + return [1, swaggerFiles, errors]; + } + return [0, swaggerFiles, []]; +} + +async function getFiles(rootDirectory: string, directory: string): Promise { + const target = path.join(rootDirectory, directory); + const items = await fs.promises.readdir(target, { + withFileTypes: true, + }); + + return items + .filter((d) => d.isFile() && d.name.endsWith(".json")) + .map((d) => path.join(target, d.name)) + .map((d) => d.replace(/^.*?(specification[\/\\].*)$/, "$1")) + .filter((d) => d.includes("specification" + path.sep)); +} + +function example(file: string): boolean { + return ( + typeof file === "string" && + file.toLowerCase().endsWith(".json") && + includesFolder(file, "examples") + ); +} + +function swagger(file: string): boolean { + return ( + typeof file === "string" && + file.toLowerCase().endsWith(".json") && + (includesFolder(file, "data-plane") || includesFolder(file, "resource-manager")) && + includesFolder(file, "specification") && + !includesFolder(file, "examples") + ); +} + +export async function processFilesToSpecificationList( + rootDirectory: string, + files: string[], +): Promise { + const cachedSwaggerSpecs = new Map(); + const resultFiles: string[] = []; + const additionalSwaggerFiles: string[] = []; + + // files from 
get-changed-files are relative to the root of the repo, + // though that context is passed into this from cli arguments. + for (const file of files) { + if (!file.startsWith("specification/")) { + continue; + } + + const absoluteFilePath = path.join(rootDirectory, file); + + // if the file is an example, we need to find the swagger file that references it + if (example(file)) { + /* + examples exist in the same directory as the swagger file that references them: + + path/to/swagger/2024-01-01/examples/example.json <-- this is an example file path + path/to/swagger/2024-01-01/swagger.json <-- we need to identify this file if it references the example + path/to/swagger/2024-01-01/swagger2.json <-- and do nothing with this one + */ + const swaggerDir = path.dirname(path.dirname(file)); + + const visibleSwaggerFiles = await getFiles(rootDirectory, swaggerDir); + + for (const swaggerFile of visibleSwaggerFiles) { + if (!cachedSwaggerSpecs.has(swaggerFile)) { + const swaggerModel = new Swagger(path.join(rootDirectory, swaggerFile)); + try { + const exampleSwaggers = await swaggerModel.getExamples(); + const examples = [...exampleSwaggers.keys()]; + cachedSwaggerSpecs.set(swaggerFile, examples); + } catch (e) { + console.log( + `Error getting examples for ${swaggerFile}: ${e instanceof Error ? e.message : String(e)}`, + ); + // if we can't get the examples, we just skip this file + continue; + } + } + const referencedExamples = cachedSwaggerSpecs.get(swaggerFile); + + // the resolved files are absolute paths, so to compare them to the file we're looking at, we need + // to use the absolute path version of the example file. + if (referencedExamples?.indexOf(absoluteFilePath) !== -1) { + // unfortunately, we get lists of files in posix format from get-changed-files. because of this, when are are grabbing a + // resolved swagger file, we need to ensure we are using the posix version of the path as well. 
If we do not do this, + // if we change an example and a spec, we will end up resolving the changed spec twice, one with the posix path (from changed-files) + // and one with the windows path (resolved from the swagger model which we pulled refs from to determine which example belonged to which swagger) + additionalSwaggerFiles.push(swaggerFile.replace(/\\/g, "/")); + } + } + } + + // finally handle our base case where the file we're examining is itself a swagger file + if (swagger(file) && fs.existsSync(absoluteFilePath)) { + resultFiles.push(file); + } + } + + // combine and make the results unique + return Array.from(new Set([...resultFiles, ...additionalSwaggerFiles])); +} diff --git a/eng/tools/oav-runner/test/cli.test.ts b/eng/tools/oav-runner/test/cli.test.ts new file mode 100644 index 000000000000..34ac9b9d93d9 --- /dev/null +++ b/eng/tools/oav-runner/test/cli.test.ts @@ -0,0 +1,31 @@ +import { describe, it, expect, vi } from "vitest"; +import { getRootFolder } from "../src/cli.js"; +import path from "path"; + +const REPOROOT = path.resolve(__dirname, "..", "..", "..", ".."); + +describe("invocation directory checks", () => { + it("Should return the same path when invoked from the root of a git repo.", async () => { + const result = await getRootFolder(REPOROOT); + expect(result).toBe(REPOROOT); + }); + + it("Should return a higher path when invoked from a path deep in a git repo.", async () => { + const result = await getRootFolder(path.join(REPOROOT, "eng", "tools", "oav-runner")); + expect(result).toBe(REPOROOT); + }); + + it("Should exit with error when invoked outside of a git directory.", async () => { + const pathOutsideRepo = path.resolve(path.join(REPOROOT, "..")); + + const exitMock = vi + .spyOn(process, "exit") + .mockImplementation((code?: string | number | null | undefined) => { + throw new Error(`Exit ${code}`); + }); + + await expect(getRootFolder(pathOutsideRepo)).rejects.toThrow("Exit 1"); + + exitMock.mockRestore(); + }); +}); diff --git 
a/eng/tools/oav-runner/test/fixtures/specification/serviceA/resource-manager/service.A/readme.md b/eng/tools/oav-runner/test/fixtures/specification/serviceA/resource-manager/service.A/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceA/resource-manager/service.A/stable/2025-06-01/serviceAspec.json b/eng/tools/oav-runner/test/fixtures/specification/serviceA/resource-manager/service.A/stable/2025-06-01/serviceAspec.json new file mode 100644 index 000000000000..13b5261fe80e --- /dev/null +++ b/eng/tools/oav-runner/test/fixtures/specification/serviceA/resource-manager/service.A/stable/2025-06-01/serviceAspec.json @@ -0,0 +1,35 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service A", + "version": "1.0.0" + }, + "paths": { + "/c": { + "get": { + "summary": "Get A", + "responses": { + "200": { + "description": "Successful response", + "schema": { + "$ref": "#/definitions/C" + } + } + } + } + } + }, + "definitions": { + "C": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + } + } +} diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/readme.md b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/CreateResource.json b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/CreateResource.json new file mode 100644 index 000000000000..e1388a21c597 --- /dev/null +++ b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/CreateResource.json @@ -0,0 +1,27 @@ +{ + "parameters": { + "api-version": "2025-06-01", + "resource": { + "name": "New Resource", + "properties": { + 
"description": "A new resource created via the API", + "tags": ["test", "new", "sample"] + } + } + }, + "responses": { + "201": { + "body": { + "id": "resource-456", + "name": "New Resource", + "type": "ServiceB/Resource", + "properties": { + "description": "A new resource created via the API", + "tags": ["test", "new", "sample"], + "status": "Provisioning", + "createdAt": "2025-06-02T10:30:00Z" + } + } + } + } +} diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/DeleteResource.json b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/DeleteResource.json new file mode 100644 index 000000000000..2d92d40d1c19 --- /dev/null +++ b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/DeleteResource.json @@ -0,0 +1,9 @@ +{ + "parameters": { + "api-version": "2025-06-01", + "resourceId": "resource-123" + }, + "responses": { + "204": {} + } +} diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetResource.json b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetResource.json new file mode 100644 index 000000000000..9187a08b00a6 --- /dev/null +++ b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetResource.json @@ -0,0 +1,20 @@ +{ + "parameters": { + "api-version": "2025-06-01", + "resourceId": "resource-123" + }, + "responses": { + "200": { + "body": { + "id": "resource-123", + "name": "Example Resource", + "type": "ServiceB/Resource", + "properties": { + "status": "Active", + "createdAt": "2025-05-30T15:30:45Z", + "lastModifiedAt": "2025-06-01T09:15:22Z" + } + } + } + } +} diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetRoot.json 
b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetRoot.json new file mode 100644 index 000000000000..c0dcb5926703 --- /dev/null +++ b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetRoot.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "api-version": "2025-06-01" + }, + "responses": { + "200": { + "body": { + "status": "OK", + "message": "Service is running", + "version": "1.0.0" + } + } + } +} diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/ListResources.json b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/ListResources.json new file mode 100644 index 000000000000..d0d3cfbc0a5d --- /dev/null +++ b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/ListResources.json @@ -0,0 +1,34 @@ +{ + "parameters": { + "api-version": "2025-06-01", + "$skip": 0, + "$top": 10 + }, + "responses": { + "200": { + "body": { + "value": [ + { + "id": "resource-123", + "name": "Example Resource", + "type": "ServiceB/Resource", + "properties": { + "status": "Active", + "createdAt": "2025-05-30T15:30:45Z" + } + }, + { + "id": "resource-456", + "name": "New Resource", + "type": "ServiceB/Resource", + "properties": { + "status": "Provisioning", + "createdAt": "2025-06-02T10:30:00Z" + } + } + ], + "nextLink": "https://service.b/api/resources?api-version=2025-06-01&$skip=10&$top=10" + } + } + } +} diff --git a/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json new file mode 100644 index 000000000000..f354591eaba6 --- /dev/null +++ 
b/eng/tools/oav-runner/test/fixtures/specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json @@ -0,0 +1,409 @@ +{ + "swagger": "2.0", + "info": { + "title": "Service B", + "version": "1.0.0", + "description": "API for Service B data plane operations" + }, + "host": "service.b", + "schemes": ["https"], + "consumes": ["application/json"], + "produces": ["application/json"], + "paths": { + "/": { + "get": { + "tags": ["Status"], + "summary": "Get Service Status", + "description": "Returns the current status of the service.", + "operationId": "Service_GetStatus", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns service status information.", + "schema": { + "$ref": "#/definitions/ServiceStatus" + } + }, + "default": { + "description": "Error response", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Get service status": { + "$ref": "./examples/GetRoot.json" + } + } + } + }, + "/resources": { + "get": { + "tags": ["Resources"], + "summary": "List Resources", + "description": "Lists all resources in the service.", + "operationId": "Resources_List", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "$skip", + "in": "query", + "description": "Skip the first n items", + "type": "integer", + "default": 0, + "minimum": 0 + }, + { + "name": "$top", + "in": "query", + "description": "Return only the first n items", + "type": "integer", + "default": 10, + "minimum": 1, + "maximum": 100 + } + ], + "responses": { + "200": { + "description": "OK - Returns a list of resources", + "schema": { + "$ref": "#/definitions/ResourceList" + } + }, + "default": { + "description": "Error response", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "List resources": { + "$ref": "./examples/ListResources.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } 
+ }, + "post": { + "tags": ["Resources"], + "summary": "Create a Resource", + "description": "Creates a new resource in the service.", + "operationId": "Resources_Create", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "resource", + "in": "body", + "description": "Resource to create", + "required": true, + "schema": { + "$ref": "#/definitions/ResourceCreateRequest" + } + } + ], + "responses": { + "201": { + "description": "Created - Returns the created resource", + "schema": { + "$ref": "#/definitions/Resource" + } + }, + "default": { + "description": "Error response", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Create resource": { + "$ref": "./examples/CreateResource.json" + } + } + } + }, + "/resources/{resourceId}": { + "get": { + "tags": ["Resources"], + "summary": "Get Resource", + "description": "Gets a specific resource by its ID.", + "operationId": "Resources_Get", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "resourceId", + "in": "path", + "description": "ID of the resource to retrieve", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "OK - Returns the requested resource", + "schema": { + "$ref": "#/definitions/Resource" + } + }, + "404": { + "description": "Not Found - The resource does not exist", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + }, + "default": { + "description": "Error response", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Get resource": { + "$ref": "./examples/GetResource.json" + } + } + }, + "delete": { + "tags": ["Resources"], + "summary": "Delete Resource", + "description": "Deletes a specific resource by its ID.", + "operationId": "Resources_Delete", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "resourceId", + "in": "path", + "description": "ID of the resource to 
delete", + "required": true, + "type": "string" + } + ], + "responses": { + "204": { + "description": "No Content - The resource was successfully deleted" + }, + "404": { + "description": "Not Found - The resource does not exist", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + }, + "default": { + "description": "Error response", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Delete resource": { + "$ref": "./examples/DeleteResource.json" + } + } + } + } + }, + "definitions": { + "ServiceStatus": { + "description": "Represents the status of the service", + "type": "object", + "properties": { + "status": { + "description": "The status of the service", + "type": "string", + "enum": ["OK", "Degraded", "Unavailable"], + "x-ms-enum": { + "name": "ServiceStatusEnum", + "modelAsString": true + } + }, + "message": { + "description": "A message providing details about the service status", + "type": "string" + }, + "version": { + "description": "The version of the service", + "type": "string" + } + } + }, + "Resource": { + "description": "Represents a resource in Service B", + "type": "object", + "properties": { + "id": { + "description": "The unique identifier of the resource", + "type": "string", + "readOnly": true + }, + "name": { + "description": "The name of the resource", + "type": "string" + }, + "type": { + "description": "The type of the resource", + "type": "string", + "readOnly": true + }, + "properties": { + "description": "The properties of the resource", + "type": "object", + "properties": { + "description": { + "description": "Description of the resource", + "type": "string" + }, + "status": { + "description": "The status of the resource", + "type": "string", + "enum": ["Active", "Inactive", "Provisioning", "Failed"], + "x-ms-enum": { + "name": "ResourceStatusEnum", + "modelAsString": true + } + }, + "tags": { + "description": "Tags associated with the resource", + "type": "array", + "items": { + "type": 
"string" + } + }, + "createdAt": { + "description": "The timestamp when the resource was created", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "lastModifiedAt": { + "description": "The timestamp when the resource was last modified", + "type": "string", + "format": "date-time", + "readOnly": true + } + } + } + } + }, + "ResourceCreateRequest": { + "description": "Request body for creating a resource", + "type": "object", + "required": ["name"], + "properties": { + "name": { + "description": "The name of the resource", + "type": "string" + }, + "properties": { + "description": "The properties of the resource", + "type": "object", + "properties": { + "description": { + "description": "Description of the resource", + "type": "string" + }, + "tags": { + "description": "Tags associated with the resource", + "type": "array", + "items": { + "type": "string" + } + } + } + } + } + }, + "ResourceList": { + "description": "A paged list of resources", + "type": "object", + "properties": { + "value": { + "description": "The list of resources", + "type": "array", + "items": { + "$ref": "#/definitions/Resource" + } + }, + "nextLink": { + "description": "The URL to get the next set of results, if there are any", + "type": "string" + } + } + }, + "ErrorResponse": { + "description": "Error response", + "type": "object", + "properties": { + "error": { + "description": "The error details", + "type": "object", + "properties": { + "code": { + "description": "Error code", + "type": "string" + }, + "message": { + "description": "Error message", + "type": "string" + }, + "target": { + "description": "Error target", + "type": "string" + }, + "details": { + "description": "Error details", + "type": "array", + "items": { + "$ref": "#/definitions/ErrorDetail" + } + } + } + } + } + }, + "ErrorDetail": { + "description": "Error detail", + "type": "object", + "properties": { + "code": { + "description": "Error code", + "type": "string" + }, + "message": { + 
"description": "Error message", + "type": "string" + }, + "target": { + "description": "Error target", + "type": "string" + } + } + } + }, + "parameters": { + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation", + "required": true, + "type": "string", + "default": "2025-06-01" + } + } +} diff --git a/eng/tools/oav-runner/test/runner.test.ts b/eng/tools/oav-runner/test/runner.test.ts new file mode 100644 index 000000000000..a01e211babfe --- /dev/null +++ b/eng/tools/oav-runner/test/runner.test.ts @@ -0,0 +1,73 @@ +import { describe, it, expect } from "vitest"; +import { processFilesToSpecificationList } from "../src/runner.js"; +import path from "path"; + +const ROOT = path.resolve(__dirname, "..", "test", "fixtures"); + +describe("file processing", () => { + it("should process a basic set of files and return a list of swagger files only", async () => { + const changedFiles = [ + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + "specification/serviceB/data-plane/service.B/readme.md", + ]; + const expected = [ + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + ]; + + const result = await processFilesToSpecificationList(ROOT, changedFiles); + expect(result).toEqual(expected); + }); + + it("should process a larger set of files and return a list of expected resolved swagger files", async () => { + const changedFiles = [ + "specification/serviceA/resource-manager/service.A/stable/2025-06-01/serviceAspec.json", + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + "specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/CreateResource.json", + "specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/DeleteResource.json", + "specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetResource.json", + 
"specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/GetRoot.json", + "specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/ListResources.json", + ]; + const expected = [ + "specification/serviceA/resource-manager/service.A/stable/2025-06-01/serviceAspec.json", + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + ]; + + const result = await processFilesToSpecificationList(ROOT, changedFiles); + expect(result).toEqual(expected); + }); + + it("should process the correct swagger file given only changed example files", async () => { + const changedFiles = [ + "specification/serviceB/data-plane/service.B/stable/2025-06-01/examples/CreateResource.json", + ]; + const expected = [ + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + ]; + + const result = await processFilesToSpecificationList(ROOT, changedFiles); + expect(result).toEqual(expected); + }); + + it("should process the correct swagger file given only changed readme file", async () => { + const changedFiles = ["specification/serviceB/data-plane/service.B/readme.md"]; + const expected: string[] = []; + + const result = await processFilesToSpecificationList(ROOT, changedFiles); + expect(result).toEqual(expected); + }); + + it("should handle deleted files without error", async () => { + const changedFiles = [ + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + // non-existent file. 
Should not throw and quietly omit + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspecDeleted.json", + ]; + const expected = [ + "specification/serviceB/data-plane/service.B/stable/2025-06-01/serviceBspec.json", + ]; + + const result = await processFilesToSpecificationList(ROOT, changedFiles); + expect(result).toEqual(expected); + }); +}); diff --git a/eng/tools/oav-runner/tsconfig.json b/eng/tools/oav-runner/tsconfig.json new file mode 100644 index 000000000000..5f48d4c6a5b5 --- /dev/null +++ b/eng/tools/oav-runner/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "allowJs": true, + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], +} diff --git a/eng/tools/openapi-diff-runner/README.md b/eng/tools/openapi-diff-runner/README.md new file mode 100644 index 000000000000..6e08744f8640 --- /dev/null +++ b/eng/tools/openapi-diff-runner/README.md @@ -0,0 +1,98 @@ +# OpenAPI Diff Runner + +A tool for detecting breaking changes in OpenAPI specifications by comparing different versions and analyzing the +differences using @azure/oad library. 
+ +## Overview + +The OpenAPI Diff Runner is designed to: + +- Compare OpenAPI specifications between different versions +- Generate detailed reports of comparing result +- Support both same-version and cross-version breaking change detection +- Integrate with GitHub workflow for automated validation + +## Installation + +```bash +# Install dependencies +npm ci + +# Build the project +npm run build +``` + +## Usage + +### Command Line Interface + +```bash +# Basic usage +npx openapi-diff-runner --srp --repo --number + +# Example +npx openapi-diff-runner \ + --srp /path/to/azure-rest-api-specs \ + --repo Azure/azure-rest-api-specs \ + --number 12345 \ + --bb main \ + --rt SameVersion +``` + +### Command Line Options + +| Option | Description | Default | +| ---------- | ----------------------------------- | ---------------------------- | +| `--srp` | Spec repository path | `../` | +| `--repo` | GitHub repository | `azure/azure-rest-api-specs` | +| `--number` | Pull request number | Required | +| `--bb` | Base branch | `main` | +| `--rt` | Run type (SameVersion/CrossVersion) | `SameVersion` | +| `--hc` | Head commit | `HEAD` | +| `--sb` | Source branch | From PR | +| `--tb` | Target branch | From PR | + +## Breaking Change Types + +### Same Version Breaking Changes + +- Changes within the same API version that break backward compatibility +- Examples: Removing properties, changing required fields, modifying response schemas + +### Cross Version Breaking Changes + +- Changes between different API versions +- Helps ensure proper versioning and migration paths + +### Workflow + +1. **Initialize Context**: Parse command line arguments and setup environment +2. **Setup PR Info**: Fetch pull request details and prepare Git workspace +3. **Detect Changes**: Use OAD (OpenAPI Analysis) to compare specifications +4. **Apply Rules**: Process detected changes through rule engine +5. 
**Generate Report**: Create detailed output with violations and recommendations + +## Development + +### Prerequisites + +- Node.js >= 20.0.0 +- npm +- .NET 6 +- Git + +### Building + +```bash +# Build TypeScript files +npm run build + +# Run tests +npm test + +# Run tests with coverage +npm run test:ci + +# Lint code +npm run prettier +``` diff --git a/eng/tools/openapi-diff-runner/cmd/openapi-diff-runner.js b/eng/tools/openapi-diff-runner/cmd/openapi-diff-runner.js new file mode 100755 index 000000000000..e2a37e0b5491 --- /dev/null +++ b/eng/tools/openapi-diff-runner/cmd/openapi-diff-runner.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +import { main } from "../dist/src/index.js"; + +await main(); diff --git a/eng/tools/openapi-diff-runner/package.json b/eng/tools/openapi-diff-runner/package.json new file mode 100644 index 000000000000..b7fd0e0a19d4 --- /dev/null +++ b/eng/tools/openapi-diff-runner/package.json @@ -0,0 +1,31 @@ +{ + "name": "@azure-tools/openapi-diff-runner", + "private": true, + "type": "module", + "main": "dist/src/index.js", + "bin": { + "openapi-diff-runner": "cmd/openapi-diff-runner.js" + }, + "scripts": { + "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", + "test": "vitest", + "test:ci": "vitest --coverage --reporter=verbose" + }, + "engines": { + "node": ">=20.0.0" + }, + "dependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", + "@azure/oad": "0.10.14" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", + "typescript": "~5.8.2", + "vitest": "^3.0.7" + } +} diff --git a/eng/tools/openapi-diff-runner/src/command-helpers.ts b/eng/tools/openapi-diff-runner/src/command-helpers.ts new file mode 100644 index 000000000000..7af38e643498 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/command-helpers.ts @@ -0,0 +1,276 @@ +import path from "node:path"; +import { existsSync, mkdirSync, readFileSync, writeFileSync, rmSync } from "node:fs"; +import { fileURLToPath } from "node:url"; +import { + BreakingChangesCheckType, + Context, + BreakingChangeReviewRequiredLabel, + VersioningReviewRequiredLabel, +} from "./types/breaking-change.js"; +import { ResultMessageRecord } from "./types/message.js"; +import { getArgumentValue } from "./utils/common-utils.js"; +import { createOadMessageProcessor } from "./utils/oad-message-processor.js"; +import { createPullRequestProperties } from "./utils/pull-request.js"; +import { getChangedFilesStatuses, swagger } from "@azure-tools/specs-shared/changed-files"; +import { logMessage, setOutput } from "./log.js"; + +/** + * Parse the arguments. + * @returns The runner command input. 
+ */ +export function initContext(): Context { + const __filename: string = fileURLToPath(import.meta.url); + const __dirname: string = path.dirname(__filename); + + // Get the arguments passed to the script + const args: string[] = process.argv.slice(2); + const localSpecRepoPath: string = path.resolve( + getArgumentValue(args, "--srp", path.join(__dirname, "..")), + ); + const swaggerDirs: string[] = ["specification", "dev"]; + const repo: string = getArgumentValue(args, "--repo", "azure/azure-rest-api-specs"); + const prNumber: string = getArgumentValue(args, "--number", ""); + const runType = getArgumentValue(args, "--rt", "SameVersion") as BreakingChangesCheckType; + const workingFolder: string = path.join(localSpecRepoPath, ".."); + const logFileFolder: string = path.join(workingFolder, "out/logs"); + + // Create the log file folder if it does not exist + if (!existsSync(logFileFolder)) { + mkdirSync(logFileFolder, { recursive: true }); + } + + const prUrl = `https://github.com/${repo}/pull/${prNumber}`; + const oadMessageProcessorContext = createOadMessageProcessor(logFileFolder, prUrl); + return { + localSpecRepoPath, + workingFolder, + swaggerDirs, + logFileFolder, + baseBranch: getArgumentValue(args, "--bb", "main"), + runType, + checkName: getBreakingChangeCheckName(runType), + headCommit: getArgumentValue(args, "--hc", "HEAD"), + repo, + prNumber, + prSourceBranch: getArgumentValue(args, "--sb", ""), + prTargetBranch: getArgumentValue(args, "--tb", ""), + oadMessageProcessorContext, + prUrl, + }; +} + +/** + * This set contains labels denoting which kind of review is required. + * + * Appropriate labels are added to this set by applyRules() function. + */ +export const BreakingChangeLabelsToBeAdded = new Set(); +export let defaultBreakingChangeBaseBranch = "main"; +function getBreakingChangeCheckName(runType: BreakingChangesCheckType): string { + return runType === "SameVersion" ? 
"Swagger BreakingChange" : "BreakingChange(Cross-Version)"; +} + +/** + * Output the breaking change labels as GitHub Actions environment variables. + * This function checks the BreakingChangeLabelsToBeAdded set and sets the appropriate outputs. + */ +export function outputBreakingChangeLabelVariables(): void { + // Output the breaking change labels as GitHub Actions environment variables + if (BreakingChangeLabelsToBeAdded.size === 0) { + logMessage("None of the breaking change review labels need to be added."); + logMessage("Setting default breaking change labels to false."); + setOutput("breakingChangeReviewLabelName", BreakingChangeReviewRequiredLabel); + setOutput("breakingChangeReviewLabelValue", "false"); + setOutput("versioningReviewLabelName", VersioningReviewRequiredLabel); + setOutput("versioningReviewLabelValue", "false"); + } else { + if (BreakingChangeLabelsToBeAdded.has(BreakingChangeReviewRequiredLabel)) { + logMessage("'BreakingChangeReviewRequired' label needs to be added."); + setOutput("breakingChangeReviewLabelName", BreakingChangeReviewRequiredLabel); + setOutput("breakingChangeReviewLabelValue", "true"); + } else { + logMessage("'BreakingChangeReviewRequired' label needs to be deleted."); + setOutput("breakingChangeReviewLabelName", BreakingChangeReviewRequiredLabel); + setOutput("breakingChangeReviewLabelValue", "false"); + } + if (BreakingChangeLabelsToBeAdded.has(VersioningReviewRequiredLabel)) { + logMessage("'VersioningReviewRequired' label needs to be added."); + setOutput("versioningReviewLabelName", VersioningReviewRequiredLabel); + setOutput("versioningReviewLabelValue", "true"); + } else { + logMessage("'VersioningReviewRequired' label needs to be deleted."); + setOutput("versioningReviewLabelName", VersioningReviewRequiredLabel); + setOutput("versioningReviewLabelValue", "false"); + } + } +} + +/** + * Get categorized changed files by calling the shared getCategorizedChangedFiles function. 
+ * Filters results to only include Swagger/OpenAPI files using the swagger filter from changed-files.js + * @param options - Options for getting changed files + * @param options.baseCommitish - Base commit to compare from (default: "HEAD^") + * @param options.cwd - Current working directory (default: process.cwd()) + * @param options.headCommitish - Head commit to compare to (default: "HEAD") + * @returns Promise resolving to categorized changed files filtered for Swagger files only + */ +export async function getSwaggerDiffs( + options: { + baseCommitish?: string; + cwd?: string; + headCommitish?: string; + } = {}, +): Promise<{ + additions: string[]; + modifications: string[]; + deletions: string[]; + renames: { from: string; to: string }[]; + total: number; +}> { + try { + // Call the function with compatible options + const result = await getChangedFilesStatuses({ + baseCommitish: options.baseCommitish, + cwd: options.cwd, + headCommitish: options.headCommitish, + }); + + // Filter each array to only include Swagger files using the swagger filter from changed-files.js + const filteredAdditions = result.additions.filter(swagger); + const filteredModifications = result.modifications.filter(swagger); + const filteredDeletions = result.deletions.filter(swagger); + const filteredRenames = result.renames.filter( + (rename) => swagger(rename.from) && swagger(rename.to), + ); + + return { + additions: filteredAdditions, + modifications: filteredModifications, + deletions: filteredDeletions, + renames: filteredRenames, + total: + filteredAdditions.length + + filteredModifications.length + + filteredDeletions.length + + filteredRenames.length, + }; + } catch (error) { + console.error("Error getting categorized changed files:", error); + // Return empty result on error + return { + additions: [], + modifications: [], + deletions: [], + renames: [], + total: 0, + }; + } +} + +/** + * NOTE: For base branch which not in targetBranches, the breaking change tool compare head 
branch with master branch. + * TargetBranches is a set of branches and treat each of them like a service team master branch. + */ +export async function buildPrInfo(context: Context): Promise { + /** + * For PR target branch not in `targetBranches`. prepare for switch to master branch, + * if not the switching to master below would failed + */ + defaultBreakingChangeBaseBranch = context.baseBranch; + const prInfo = await createPullRequestProperties( + context, + context.runType === "CrossVersion" ? "cross-version" : "same-version", + ); + if (!prInfo || !prInfo.targetBranch) { + throw new Error("create PR failed!"); + } + context.prInfo = prInfo; +} + +// Constants and state for dummy swagger management +const whitelistsBranches = ["ARMCoreRPDev", "rpsaasmaster"]; +const createdDummySwagger: string[] = []; + +/** + * Change the base branch for comparison based on context and whitelist rules + */ +export function changeBaseBranch(context: Context): void { + /* + * always compare against main + * we still use the changed files got from the PR, because the main branch may quite different with the PR target branch + */ + function isBreakingChangeWhiteListBranch() { + return ( + isSameVersionBreakingType(context.runType) && + whitelistsBranches.some((b) => context.prTargetBranch.toLowerCase() === b.toLowerCase()) + ); + } + // same version breaking change for PR targets to rpaas or armCoreRpDev, will compare with the original target branch. + if (context.baseBranch !== context.prTargetBranch && !isBreakingChangeWhiteListBranch()) { + context.prInfo!.baseBranch = context.baseBranch; + logMessage(`switch target branch to ${context.baseBranch}`); + } +} + +/** + * Log the full list of OAD messages to console + */ +export function logFullOadMessagesList(msgs: ResultMessageRecord[]): void { + logMessage("---- Full list of messages ----"); + logMessage("["); + // Printing the messages one by one because the console.log appears to elide the messages with "... 
X more items" + // after approximately 292 messages. + for (const msg of msgs) { + logMessage(JSON.stringify(msg, null, 4) + ","); + } + logMessage("]"); + logMessage("---- End of full list of messages ----"); +} + +/** + * Create a dummy swagger file for comparison purposes + */ +export function createDummySwagger(fromSwagger: string, toSwagger: string): void { + if (!existsSync(path.dirname(toSwagger))) { + mkdirSync(path.dirname(toSwagger), { recursive: true }); + } + const content = readFileSync(fromSwagger).toString(); + const swaggerJson = JSON.parse(content); + swaggerJson.paths = {}; + if (swaggerJson["x-ms-paths"]) { + swaggerJson["x-ms-paths"] = {}; + } + if (swaggerJson["x-ms-parameterized-host"]) { + delete swaggerJson["x-ms-parameterized-host"]; + } + swaggerJson.parameters = {}; + swaggerJson.definitions = {}; + writeFileSync(toSwagger, JSON.stringify(swaggerJson, null, 2)); + createdDummySwagger.push(toSwagger); + logMessage(`created a dummy swagger: ${toSwagger} from ${fromSwagger}`); +} + +/** + * Clean up all created dummy swagger files + */ +export function cleanDummySwagger(): void { + for (const swagger of createdDummySwagger) { + rmSync(swagger, { recursive: true, force: true }); + } + // Clear the array after removing files + createdDummySwagger.length = 0; +} + +/** + * Return true if the type indicates the same version breaking change + */ +export function isSameVersionBreakingType(type: BreakingChangesCheckType): boolean { + return type === "SameVersion"; +} + +/** + * Get the count of created dummy swagger files + */ +export function getCreatedDummySwaggerCount(): number { + return createdDummySwagger.length; +} diff --git a/eng/tools/openapi-diff-runner/src/commands.ts b/eng/tools/openapi-diff-runner/src/commands.ts new file mode 100644 index 000000000000..84cf1dbb111c --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/commands.ts @@ -0,0 +1,251 @@ +/** + * By design, the only members exported from this file are: + * - function 
validateBreakingChange() + * - let defaultBreakingChangeBaseBranch + * + * defaultBreakingChangeBaseBranch must be exported as it is a static variable. TODO: refactor it. + * + * In the "breakingChanges directory invocation depth" this file has depth 1, + * i.e. it is invoked by files with depth 0 and invokes files with depth 2. + */ + +import { RawMessageRecord, ResultMessageRecord } from "./types/message.js"; +import { existsSync } from "node:fs"; +import * as path from "path"; +import { createOadTrace, setOadBaseBranch, generateOadMarkdown } from "./types/oad-types.js"; +import { + createBreakingChangeDetectionContext, + checkBreakingChangeOnSameVersion, +} from "./detect-breaking-change.js"; +import { BreakingChangesCheckType, Context } from "./types/breaking-change.js"; +import { + getSwaggerDiffs, + changeBaseBranch, + logFullOadMessagesList, + createDummySwagger, + cleanDummySwagger, + isSameVersionBreakingType, + getCreatedDummySwaggerCount, + outputBreakingChangeLabelVariables, +} from "./command-helpers.js"; +import { generateBreakingChangeResultSummary } from "./generate-report.js"; +import { LOG_PREFIX, logMessage } from "./log.js"; +import { appendMarkdownToLog } from "./utils/oad-message-processor.js"; + +/** + * The function validateBreakingChange() is executed with type SameVersion or CrossVersion, by + * corresponding runScript functions in: + * - breakingChangeValidationPipeline.ts + * - crossVersionBreakingChangeValidationPipeline.ts + * + * Most importantly, this function does the following: + * + * 1. Invokes + * detect-breaking-change.checkBreakingChangeOnSameVersion() + * or + * detect-breaking-change.checkCrossVersionBreakingChange(), + * depending on the input type. + * + * 2. Saves the PR context to the unified pipeline store ("pipe.log" file) in call to: + * oadTracer.save(context.contextConfig() as PRContext); + * Note that this does not save the OAD messages to the unified pipeline store. 
+ * Instead, they are saved to unified pipeline store within step 1. + * + * 3. Adds "review required" labels to ADO pipeline variable, in call to: + * ruleManager.addBreakingChangeLabelsToBeAdded(comparisonType); + * + * 4. Outputs full list of the OAD messages to build log for human review, in call to: + * logFullOadMessagesList() + * TODO: add breaking change labels + */ +export async function validateBreakingChange(context: Context): Promise { + let statusCode: number = 0; + let oadTracer = createOadTrace(context); + logMessage("ENTER definition validateBreakingChange"); + + logMessage(`PR target branch is ${context.prInfo ? context.prTargetBranch : ""}`); + + const diffs = await getSwaggerDiffs(); + + logMessage("Found PR changes:"); + logMessage(JSON.stringify(diffs, null, 2)); + + let swaggersToProcess = diffs.modifications?.concat(diffs.additions || []) as Array; + + logMessage("Processing swaggers:"); + logMessage(JSON.stringify(swaggersToProcess, null, 2)); + + // 1 switch pr to base branch + changeBaseBranch(context); + await context.prInfo?.checkout(context.prInfo.baseBranch); + + const newSwaggers = diffs.additions || []; + + const changedSwaggers = diffs.modifications || []; + + const deletedSwaggers = diffs.deletions || []; + + const newExistingVersionDirs: string[] = []; + + const addedVersionDirs = [...newSwaggers.map((f: string) => path.dirname(f))]; + + for (const f of addedVersionDirs) { + if (existsSync(path.join(context.prInfo!.workingDir, f))) { + newExistingVersionDirs.push(f); + } + } + // new swaggers in the existing version folder + const newExistingVersionSwaggers = newSwaggers.filter((f: string) => + newExistingVersionDirs.includes(path.dirname(f)), + ); + const needCompareDeletedSwaggers: string[] = deletedSwaggers.filter((f: string) => + existsSync(path.join(context.prInfo!.workingDir, f)), + ); + + const newVersionSwaggers = newSwaggers.filter( + (f: string) => !newExistingVersionDirs.includes(path.dirname(f)), + ); + const 
nonExistingChangedSwaggers = changedSwaggers.filter( + (f: string) => !existsSync(path.join(context.prInfo!.workingDir, f)), + ); + const existingChangedSwaggers = changedSwaggers.filter( + (f: string) => !nonExistingChangedSwaggers.includes(f), + ); + const needCompareOldSwaggers = existingChangedSwaggers + .concat(newExistingVersionSwaggers) + .concat(needCompareDeletedSwaggers); + + logMessage("Found new version swaggers:"); + logMessage(JSON.stringify(newVersionSwaggers, null, 2)); + + logMessage("Found new existing version swaggers:"); + logMessage(JSON.stringify(newExistingVersionSwaggers, null, 2)); + + logMessage("Found changed existing swaggers:"); + logMessage(JSON.stringify(existingChangedSwaggers, null, 2)); + + logMessage("The following changed swaggers are not existed in base branch:"); + logMessage(JSON.stringify(nonExistingChangedSwaggers, null, 2)); + + logMessage("The following are deleted swaggers that need to do the comparison: "); + logMessage(JSON.stringify(needCompareDeletedSwaggers, null, 2)); + + logMessage( + `Creating dummy files to compare for deleted Swagger files. Count: ${needCompareDeletedSwaggers.length}`, + ); + + // create a dummy file to compare. if the deleted file exists in base branch + for (const f of needCompareDeletedSwaggers) { + const baseFilePath = path.join(context.prInfo!.workingDir, f); + if (isSameVersionBreakingType(context.runType)) { + createDummySwagger(baseFilePath, path.resolve(f)); + } + } + + logMessage( + `Creating dummy files to compare for new Swagger files in existing API version folders. ` + + `Count: ${newExistingVersionSwaggers.length}`, + ); + + // create dummy swagger for new swaggers whose api version already existed before the PR. 
+ newExistingVersionSwaggers.forEach((f: string) => { + const oldSwagger = path.join(context.prInfo!.workingDir, f); + if (isSameVersionBreakingType(context.runType)) { + createDummySwagger(path.resolve(f), oldSwagger); + } + }); + + if (context.prInfo) { + oadTracer = setOadBaseBranch(oadTracer, context.prInfo.baseBranch); + const detectionContext = createBreakingChangeDetectionContext( + context, + needCompareOldSwaggers, + oadTracer, + ); + + let msgs: ResultMessageRecord[] = []; + let runtimeErrors: RawMessageRecord[] = []; + let oadViolationsCnt: number = 0; + let errorCnt: number = 0; + + let comparisonType: BreakingChangesCheckType = isSameVersionBreakingType(context.runType) + ? "SameVersion" + : "CrossVersion"; + + ({ msgs, runtimeErrors, oadViolationsCnt, errorCnt } = + await checkBreakingChangeOnSameVersion(detectionContext)); + const comparedSpecsTableContent = generateOadMarkdown(detectionContext.oadTracer); + + // Log the markdown content to the pipeline log file + if (comparedSpecsTableContent) { + appendMarkdownToLog(context.oadMessageProcessorContext, comparedSpecsTableContent); + } + + // process breaking change labels + outputBreakingChangeLabelVariables(); + + // If exitCode is already defined and non-zero, we do not interfere with its value here. + if (process.exitCode === undefined || process.exitCode === 0) { + // This exitCode determines if the relevant GitHub breaking change check + // will fail. We want for it to fail only if: + // + // Case 1: there was at least one label added denoting breaking change issue, as declared by oadMessagesRuleMap.ts + // + // OR + // + // Case 2: there was at least one runtime error that is not a warning. + // + // Notably, we want for the exitCode to remain 0, denoting success, in following cases: + // - If there are messages from OAD (openapi-diff) denoting violations, but none + // of them resulted in adding any breaking changes labels. 
+ // This is why we do not include 'oadViolationsCnt' in this formula at all. + // Instead, we rely on 'labelsAddedCount'. + // See https://github.com/Azure/azure-sdk-tools/issues/6396 + // - If there are errors, but they are only warning-level. This happens when comparing + // to previous preview version. In such cases, these errors are not included in the 'errorCnt' at all. + //process.exitCode = labelsAddedCount > 0 || errorCnt > 0 ? 1 : 0; + process.exitCode = errorCnt > 0 ? 1 : 0; + } + + logMessage( + `${LOG_PREFIX}validateBreakingChange: prUrl: ${context.prUrl}, ` + + `comparisonType: ${comparisonType}, labelsAddedCount: , ` + + `errorCnt: ${errorCnt}, oadViolationsCnt: ${oadViolationsCnt}, ` + + `process.exitCode: ${process.exitCode}`, + ); + + if (process.exitCode === 0 && oadViolationsCnt > 0) { + // We are using this log as a metric to track and measure impact of the work on improving "breaking changes" tooling. Log statement added around 2/22/2024. + // See: https://github.com/Azure/azure-sdk-tools/issues/7223#issuecomment-1839830834 + logMessage( + `${LOG_PREFIX}validateBreakingChange: ` + + `Prevented spurious failure of breaking change check. prUrl: ${context.prUrl}, ` + + `comparisonType: ${comparisonType}, oadViolationsCnt: ${oadViolationsCnt}, ` + + `process.exitCode: ${process.exitCode}.`, + ); + } + if (oadViolationsCnt > 0 || errorCnt > 0) { + // set statusCode to 1 if there are any OAD violations(errors) or runtime errors occurred. + statusCode = 1; + } + + logFullOadMessagesList(msgs); + await generateBreakingChangeResultSummary( + context, + msgs, + runtimeErrors, + comparedSpecsTableContent, + "", + ); + } else { + logMessage("!pr. Skipping the process of breaking change detection."); + } + + logMessage(`Cleaning up dummy files. 
Count: ${getCreatedDummySwaggerCount()}`); + + cleanDummySwagger(); + + logMessage("RETURN definition validateBreakingChange"); + logMessage(`${LOG_PREFIX}validateBreakingChange: statusCode: ${statusCode}`); + return statusCode; +} diff --git a/eng/tools/openapi-diff-runner/src/detect-breaking-change.ts b/eng/tools/openapi-diff-runner/src/detect-breaking-change.ts new file mode 100644 index 000000000000..089e9dc455d8 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/detect-breaking-change.ts @@ -0,0 +1,221 @@ +/** + * By design, the members exported from this file are functional breaking change detection utilities. + * + * In the "breakingChanges directory invocation depth" this file has depth 2, + * i.e. it is invoked by files with depth 1 and invokes files with depth 3. + */ +import { + ApiVersionLifecycleStage, + BreakingChangesCheckType, + Context, + logFileName, +} from "./types/breaking-change.js"; +import { RawMessageRecord, ResultMessageRecord } from "./types/message.js"; +import { + blobHref, + branchHref, + getRelativeSwaggerPathToRepo, + processOadRuntimeErrorMessage, + specIsPreview, +} from "./utils/common-utils.js"; +import { appendFileSync } from "node:fs"; +import * as path from "path"; +import { applyRules } from "./utils/apply-rules.js"; +import { OadMessage, OadTraceData, addOadTrace } from "./types/oad-types.js"; +import { runOad } from "./run-oad.js"; +import { processAndAppendOadMessages } from "./utils/oad-message-processor.js"; +import { logError, logMessage } from "./log.js"; + +// We want to display some lines as we improved AutoRest v2 error output since March 2024 to provide multi-line error messages, e.g.: +// https://github.com/Azure/autorest/pull/4934 +// For console (diagnostic) logs we want to display the entire stack trace. +// The value here is an arbitrary high number to limit the stack trace in case a bug would cause it to be excessively long. 
+const stackTraceMaxLength = 500; + +/** + * Context for breaking change detection operations + */ +export interface BreakingChangeDetectionContext { + context: Context; + oldSwaggers: string[]; + oadTracer: OadTraceData; + msgs: ResultMessageRecord[]; + runtimeErrors: RawMessageRecord[]; + tempTagName: string; +} + +/** + * Create a new breaking change detection context + */ +export function createBreakingChangeDetectionContext( + context: Context, + oldSwaggers: string[], + oadTracer: OadTraceData, +): BreakingChangeDetectionContext { + return { + context, + oldSwaggers, + oadTracer, + msgs: [], + runtimeErrors: [], + tempTagName: "oad-default-tag", + }; +} + +/** + * The entry points for breaking change detection are: + * - checkBreakingChangeOnSameVersion() + * - checkCrossVersionBreakingChange() (TODO: implement) + * both of which are invoked by the function commands.ts / validateBreakingChange() + * TODO migrate swaggerVersionManager to support cross-version checks + */ + +/** The function checkBreakingChangeOnSameVersion() + * maps to the lower "Same-version check" rectangle at: + * https://aka.ms/azsdk/pr-brch-deep#diagram-explaining-breaking-changes-and-versioning-issues + * + * This function is called by the function commands.ts / validateBreakingChange() + * This function calls doBreakingChangeDetection with appropriate "type" and "isCrossVersion" parameters. 
+ */ +export async function checkBreakingChangeOnSameVersion( + detectionContext: BreakingChangeDetectionContext, +): Promise<{ + msgs: ResultMessageRecord[]; + runtimeErrors: RawMessageRecord[]; + oadViolationsCnt: number; + errorCnt: number; +}> { + logMessage(`ENTER definition checkBreakingChangeOnSameVersion`); + + let aggregateOadViolationsCnt = 0; + let aggregateErrorCnt = 0; + + for (const swaggerJson of detectionContext.oldSwaggers) { + const { oadViolationsCnt, errorCnt } = await doBreakingChangeDetection( + detectionContext, + path.resolve(detectionContext.context.prInfo!.workingDir, swaggerJson), + swaggerJson, + "SameVersion", + specIsPreview(swaggerJson) ? "preview" : "stable", + ); + aggregateOadViolationsCnt += oadViolationsCnt; + aggregateErrorCnt += errorCnt; + } + + logMessage( + `RETURN definition checkBreakingChangeOnSameVersion. ` + + `msgs.length: ${detectionContext.msgs.length}, ` + + `aggregateOadViolationsCnt: ${aggregateOadViolationsCnt}, aggregateErrorCnt: ${aggregateErrorCnt}`, + ); + + return { + msgs: detectionContext.msgs, + runtimeErrors: detectionContext.runtimeErrors, + oadViolationsCnt: aggregateOadViolationsCnt, + errorCnt: aggregateErrorCnt, + }; +} + +/** + * The function doBreakingChangeDetection() + * is called by + * + * - checkBreakingChangeOnSameVersion() + * - or checkCrossVersionBreakingChange() + * + * with appropriate options. + * + * Most importantly, this function does the following: + * + * 1. Invokes "@azure/oad" via call to runOad() to obtain OadMessage[] collection. + * + * 2. It post-processes the OadMessage[] collection by calling + * applyRules() function + * + * which uses the oadMessagesRuleMap.ts config to schedule + * appropriate "review required" labels to be added downstream by doBreakingChangeDetection() calling addBreakingChangeLabelsToBeAdded() + * as well as updates the OAD messages severity. + * + * 3. 
It saves the OadMessage[] collection to the unified pipeline store ("pipe.log" file) in call to: + * processAndAppendOadMessages() + * + * 4. It saves OAD errors, if any, to the unified pipeline store ("pipe.log" file) in call to: + * appendOadRuntimeErrors() + */ +export async function doBreakingChangeDetection( + detectionContext: BreakingChangeDetectionContext, + oldSpec: string, + newSpec: string, + scenario: BreakingChangesCheckType, + previousApiVersionLifecycleStage: ApiVersionLifecycleStage, +): Promise<{ oadViolationsCnt: number; errorCnt: number }> { + logMessage(`ENTER definition doBreakingChangeDetection oldSpec: ${oldSpec}, newSpec: ${newSpec}`); + + let oadViolationsCnt = 0; + let errorCnt = 0; + + try { + await detectionContext.context.prInfo!.checkout(detectionContext.context.prInfo!.baseBranch); + const oadMessages = await runOad( + path.resolve(detectionContext.context.localSpecRepoPath, oldSpec), + newSpec, + ); + + // Handle tracing separately - no need for a trace of two tags comparison + detectionContext.oadTracer = addOadTrace( + detectionContext.oadTracer, + getRelativeSwaggerPathToRepo(oldSpec), + newSpec, + ); + + const modifiedOadMessages: OadMessage[] = applyRules( + oadMessages, + scenario, + previousApiVersionLifecycleStage, + ); + + oadViolationsCnt += modifiedOadMessages.filter( + (oadMessage) => oadMessage.type === "Error", + ).length; + + const msgs: ResultMessageRecord[] = processAndAppendOadMessages( + detectionContext.context.oadMessageProcessorContext, + modifiedOadMessages, + detectionContext.context.baseBranch, + ); + detectionContext.msgs = detectionContext.msgs.concat(msgs); + } catch (e) { + const error = e instanceof Error ? 
e : new Error(String(e)); + const runtimeError: RawMessageRecord = { + type: "Raw", + level: "Error", + message: "Runtime Exception", + time: new Date(), + groupName: previousApiVersionLifecycleStage, + extra: { + new: blobHref(getRelativeSwaggerPathToRepo(newSpec)), + old: branchHref( + getRelativeSwaggerPathToRepo( + path.resolve(detectionContext.context.localSpecRepoPath, oldSpec), + ), + detectionContext.context.baseBranch, + ), + details: processOadRuntimeErrorMessage(error.message, stackTraceMaxLength), + }, + }; + detectionContext.runtimeErrors.push(runtimeError); + errorCnt += 1; + appendFileSync(logFileName, JSON.stringify(runtimeError) + "\n"); + logError(`appendOadRuntimeErrors: ${JSON.stringify(runtimeError)}`); + } + + logMessage( + `RETURN definition doBreakingChangeDetection ` + + `scenario: ${scenario}, ` + + `previousApiVersionLifecycleStage: ${previousApiVersionLifecycleStage}, ` + + `oldSpec: ${oldSpec}, newSpec: ${newSpec}, ` + + `oadViolationsCnt: ${oadViolationsCnt}, errorCnt: ${errorCnt}`, + ); + + return { oadViolationsCnt, errorCnt }; +} diff --git a/eng/tools/openapi-diff-runner/src/generate-report.ts b/eng/tools/openapi-diff-runner/src/generate-report.ts new file mode 100644 index 000000000000..0cf726c1fc39 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/generate-report.ts @@ -0,0 +1,289 @@ +import { + BrChMsgRecord, + getKey, + MessageLevel, + RawMessageRecord, + ResultMessageRecord, +} from "./types/message.js"; +import { ApiVersionLifecycleStage, Context } from "./types/breaking-change.js"; +import { + BreakingChangeMdReport, + createBreakingChangeMdReport, + reportToString, + sortBreakingChangeMdReports, +} from "./utils/markdown-report.js"; +import { addToSummary, logMessage, logWarning } from "./log.js"; + +// Per the GitHub documentation [1], the length limit of a check pane is 65535 characters. 
+// While not immediately obvious, it looks like the 65535 limit applies to the total length of the text and summary, +// not separately. +// +// [1] Properties of output / text and output / summary at: +// https://docs.github.com/en/rest/checks/runs?apiVersion=2022-11-28#create-a-check-run +const checkPaneLengthLimit = 65535; + +// GitHub Actions job summary limit is 1MB (much more generous than check runs) +const jobSummaryLengthLimit = 1048576; // 1MB in bytes + +export async function generateBreakingChangeResultSummary( + context: Context, + messages: ResultMessageRecord[], + runtimeErrors: RawMessageRecord[], + comparedSpecsTableContent: string, + summaryDataSuppressionAndDetailsText: string, +): Promise { + const allMessageRecords: BrChMsgRecord[] = [...messages, ...runtimeErrors]; + + const summaryData = getSummaryData( + context.checkName, + allMessageRecords, + summaryDataSuppressionAndDetailsText, + ); + const maxCommentDataLength = checkPaneLengthLimit - summaryData.length; + const commentData = await getCommentData( + context.checkName, + comparedSpecsTableContent, + allMessageRecords, + maxCommentDataLength, + ); + + // Construct complete markdown report for GitHub Actions job summary + const markdownReport = summaryData + commentData; + + // Output to GitHub Actions job summary + await writeToJobSummary(markdownReport); + + logMessage( + `RETURNING. messageRecords# raw/result/all: ` + + `${runtimeErrors.length}/${messages.length}/${runtimeErrors.length + messages.length}, ` + + `length summary/comment/(summary+comment): ` + + `${summaryData.length}/${commentData.length}/${summaryData.length + commentData.length}.`, + ); +} + +async function getCommentData( + checkName: string, + comparedSpecsTableContent: string, + msgs: BrChMsgRecord[], + maxCommentDataLength: number, +): Promise { + // Add blank line before table if table content exists to ensure proper markdown rendering + const markdownMessageRow = comparedSpecsTableContent + ? 
"\n" + comparedSpecsTableContent + "\n" + : ""; + const textPrefixLength = markdownMessageRow.length; + const reportsString: string = await getReportsAsString( + checkName, + msgs, + textPrefixLength, + maxCommentDataLength, + ); + + let commentData = markdownMessageRow + reportsString; + if (commentData.length > maxCommentDataLength) { + logWarning( + `ASSERTION VIOLATION! commentData.length == ${commentData.length} which is > maxCommentDataLength of ${maxCommentDataLength}.`, + ); + commentData = commentData.substring(0, maxCommentDataLength - 20) + "... ⚠️ TRUNCATED ⚠️"; + } + + return commentData; +} + +async function getReportsAsString( + checkName: string, + msgs: BrChMsgRecord[], + textPrefixLength: number, + maxCommentDataLength: number, +): Promise { + let [stableReports, previewReports, maxRowCountAcrossKeys] = getReports(msgs); + let currentMaxRowCount = maxRowCountAcrossKeys; + + let reportsString: string = getReportsString( + checkName, + stableReports, + previewReports, + currentMaxRowCount, + ); + while ( + currentMaxRowCount > 0 && + !totalTextLengthWithinLimit(reportsString, textPrefixLength, maxCommentDataLength) + ) { + currentMaxRowCount--; + reportsString = getReportsString(checkName, stableReports, previewReports, currentMaxRowCount); + } + + if (!totalTextLengthWithinLimit(reportsString, textPrefixLength, maxCommentDataLength)) { + logWarning( + `ASSERTION VIOLATION! totalTextLengthWithinLimit is false. currentMaxRowCount: ${currentMaxRowCount}.`, + ); + } + + logMessage( + `getReportsAsOneString: RETURNING. 
` + + `checkShowName: ${checkName}, ` + + `maxRowCount reduced/current/max: ${maxRowCountAcrossKeys - currentMaxRowCount}/${currentMaxRowCount}/${maxRowCountAcrossKeys}, ` + + `reportsString.length: ${reportsString.length}.`, + ); + + return reportsString; +} + +function getReports( + msgs: BrChMsgRecord[], +): [BreakingChangeMdReport[], BreakingChangeMdReport[], number] { + const msgsByKey: Record = groupMsgsByKey(msgs); + + let maxRowCount = 0; + let stableReports: BreakingChangeMdReport[] = []; + let previewReports: BreakingChangeMdReport[] = []; + + Object.entries(msgsByKey).forEach(([, msgs]) => { + const stableMsgs = msgs.filter( + (msg) => (msg.groupName as ApiVersionLifecycleStage) == "stable", + ); + const previewMsgs = msgs.filter( + (msg) => (msg.groupName as ApiVersionLifecycleStage) == "preview", + ); + + if (stableMsgs.length > 0) { + stableReports.push(createBreakingChangeMdReport(stableMsgs)); + } + if (previewMsgs.length > 0) { + previewReports.push(createBreakingChangeMdReport(previewMsgs)); + } + + maxRowCount = Math.max(maxRowCount, stableMsgs.length, previewMsgs.length); + }); + + return [ + sortBreakingChangeMdReports(stableReports), + sortBreakingChangeMdReports(previewReports), + maxRowCount, + ]; +} + +function getReportsString( + checkName: string, + stableReports: BreakingChangeMdReport[], + previewReports: BreakingChangeMdReport[], + maxRowCount: number, +): string { + if (stableReports.length == 0 && previewReports.length == 0) { + return `No breaking changes detected.\n`; + } + + if (checkName === "Swagger BreakingChange") { + return [...stableReports, ...previewReports] + .map((report) => reportToString(report, maxRowCount)) + .join("\n"); + } else { + return getComparedApiVersionsReportsString(stableReports, previewReports, maxRowCount); + } +} + +function getComparedApiVersionsReportsString( + stableReports: BreakingChangeMdReport[], + previewReports: BreakingChangeMdReport[], + maxRowCount: number, +): string { + const 
stableApiVersionComparisonReportsString: string = getReportsComparedToApiVersionString( + stableReports, + "stable", + maxRowCount, + ); + const previewApiVersionComparisonReportsString: string = getReportsComparedToApiVersionString( + previewReports, + "preview", + maxRowCount, + ); + return stableApiVersionComparisonReportsString + previewApiVersionComparisonReportsString; +} + +function getReportsComparedToApiVersionString( + reports: BreakingChangeMdReport[], + comparedApiVersion: ApiVersionLifecycleStage, + maxRowCount: number, +): string { + return ( + `# The following breaking changes have been detected in comparison to the latest ${comparedApiVersion} version\n` + + (reports.length > 0 + ? reports.map((report) => reportToString(report, maxRowCount)).join("\n") + : "No breaking changes detected in this comparison.\n") + ); +} + +function totalTextLengthWithinLimit( + reportsString: string, + textPrefixLength: number, + maxCommentDataLength: number, +): boolean { + return textPrefixLength + reportsString.length <= maxCommentDataLength; +} + +function groupMsgsByKey(msgs: BrChMsgRecord[]): Record { + return msgs.reduce((msgsByKey: Record, msg: BrChMsgRecord) => { + const key = getKey(msg); + if (!msgsByKey[key]) { + msgsByKey[key] = []; + } + msgsByKey[key].push(msg); + return msgsByKey; + }, {}); +} + +function getSummaryData( + checkName: string, + messageRecords: BrChMsgRecord[], + summaryDataSuppressionAndDetailsText: string, +): string { + const errorCount = getMessageLevelCounts(messageRecords, "Error"); + const warningCount = getMessageLevelCounts(messageRecords, "Warning"); + let summaryTitle = checkName; + if (errorCount > 0) { + summaryTitle = + `Detected: ${getMessageLevelCounts(messageRecords, "Error")} Errors, ` + + `${getMessageLevelCounts(messageRecords, "Warning")} Warnings\n`; + } else if (warningCount > 0) { + summaryTitle = `Detected: ${getMessageLevelCounts(messageRecords, "Warning")} Warnings\n`; + } + + return ( + summaryTitle + + 
summaryDataSuppressionAndDetailsText + + `> [!IMPORTANT]\n` + + `> Browse to the job logs to see the details.\n` + ); +} + +function getMessageLevelCounts(msgs: BrChMsgRecord[], msgLevel: MessageLevel) { + return msgs.filter((msg) => msg.level === msgLevel).length; +} + +/** + * Writes markdown content to GitHub Actions job summary + * Handles the 1MB limit and truncation if necessary + */ +async function writeToJobSummary(markdownContent: string): Promise { + if (!process.env.GITHUB_STEP_SUMMARY) { + logMessage("GitHub Actions job summary not available, skipping summary output."); + return; + } + + let finalContent = markdownContent; + + // Check if content exceeds job summary limit (1MB) + if (markdownContent.length > jobSummaryLengthLimit) { + const truncationMessage = + "\n\n⚠️ **Report truncated due to GitHub Actions job summary size limits (1MB)** ⚠️\n\nFor the complete report, please check the build logs."; + const availableSpace = jobSummaryLengthLimit - truncationMessage.length; + finalContent = markdownContent.substring(0, availableSpace) + truncationMessage; + + logWarning( + `Job summary content truncated. 
Original length: ${markdownContent.length}, truncated to: ${finalContent.length}`, + ); + } + + addToSummary(finalContent); + logMessage(`Successfully wrote ${finalContent.length} characters to GitHub Actions job summary.`); +} diff --git a/eng/tools/openapi-diff-runner/src/index.ts b/eng/tools/openapi-diff-runner/src/index.ts new file mode 100644 index 000000000000..65389cad66e0 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/index.ts @@ -0,0 +1,17 @@ +import { exit } from "node:process"; +import { validateBreakingChange } from "./commands.js"; +import { buildPrInfo, initContext } from "./command-helpers.js"; +import { logMessage } from "./log.js"; + +export async function main() { + // Get the arguments passed to the script + const args: string[] = process.argv.slice(2); + // Log the arguments to the console + logMessage(`Arguments passed to the script: ${args.join(" ")}`); + logMessage(`Current working directory: ${process.cwd()}`); + const context = initContext(); + await buildPrInfo(context); + let statusCode = 0; + statusCode = await validateBreakingChange(context); + exit(statusCode); +} diff --git a/eng/tools/openapi-diff-runner/src/log.ts b/eng/tools/openapi-diff-runner/src/log.ts new file mode 100644 index 000000000000..d04698653db0 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/log.ts @@ -0,0 +1,132 @@ +import { appendFileSync } from "fs"; + +/** + * Log prefix for all messages from openapi-diff-runner + */ +export const LOG_PREFIX = "Runner-"; + +export enum LogLevel { + Error = "error", + Warn = "warn", + Info = "info", + Debug = "debug", + Notice = "notice", + Group = "group", + EndGroup = "endgroup", +} + +/** + * Logs a message to the console with GitHub Actions workflow commands. + * Automatically prefixes messages with LOG_PREFIX. + * @param message The message to log. + * @param level The log level (e.g., LogLevel.Group, LogLevel.EndGroup, LogLevel.Debug, LogLevel.Error). 
+ */ +export function logMessage(message: string, level?: LogLevel): void { + switch (level) { + case LogLevel.Group: { + console.log(`::group::${message}`); + break; + } + case LogLevel.EndGroup: { + console.log(`::endgroup::`); + break; + } + case LogLevel.Debug: { + console.log(`::debug::${message}`); + break; + } + case LogLevel.Error: { + console.log(`::error::${message}`); + break; + } + case LogLevel.Warn: { + console.log(`::warning::${message}`); + break; + } + case LogLevel.Notice: { + console.log(`::notice::${message}`); + break; + } + case LogLevel.Info: + default: { + console.log(message); + break; + } + } +} + +/** + * Log an error with file location information for GitHub Actions + * Automatically prefixes messages with LOG_PREFIX. + * @param message Error message + * @param file File path (optional) + * @param line Line number (optional) + * @param col Column number (optional) + */ +export function logError(message: string, file?: string, line?: number, col?: number): void { + if (file) { + const location = line && col ? `line=${line},col=${col}` : line ? `line=${line}` : ""; + const fileLocation = location ? `file=${file},${location}` : `file=${file}`; + console.log(`::error ${fileLocation}::${message}`); + } else { + console.log(`::error::${message}`); + } +} + +/** + * Log a warning with file location information for GitHub Actions + * Automatically prefixes messages with LOG_PREFIX. + * @param message Warning message + * @param file File path (optional) + * @param line Line number (optional) + * @param col Column number (optional) + */ +export function logWarning(message: string, file?: string, line?: number, col?: number): void { + if (file) { + const location = line && col ? `line=${line},col=${col}` : line ? `line=${line}` : ""; + const fileLocation = location ? 
`file=${file},${location}` : `file=${file}`; + console.log(`::warning ${fileLocation}::${message}`); + } else { + console.log(`::warning::${message}`); + } +} + +/** + * Set an output parameter in GitHub Actions + * @param name Output parameter name + * @param value Output parameter value + */ +export function setOutput(name: string, value: string): void { + if (process.env.GITHUB_OUTPUT) { + appendFileSync(process.env.GITHUB_OUTPUT, `${name}=${value}\n`); + } else { + // Fallback to older syntax + console.log(`::set-output name=${name}::${value}`); + } +} + +/** + * Add content to the GitHub Actions job summary + * @param content Content to add to summary + */ +export function addToSummary(content: string): void { + if (process.env.GITHUB_STEP_SUMMARY) { + appendFileSync(process.env.GITHUB_STEP_SUMMARY, content); + } + // Do nothing if GITHUB_STEP_SUMMARY is not available +} + +/** + * Create a collapsible group in logs + * @param title Group title + * @param content Function that logs the group content + */ +export async function logGroup(title: string, content: () => Promise | T): Promise { + logMessage(title, LogLevel.Group); + try { + const result = await content(); + return result; + } finally { + logMessage("", LogLevel.EndGroup); + } +} diff --git a/eng/tools/openapi-diff-runner/src/run-oad.ts b/eng/tools/openapi-diff-runner/src/run-oad.ts new file mode 100644 index 000000000000..2ac296a38f0a --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/run-oad.ts @@ -0,0 +1,88 @@ +/** + * By design, the only member exported from this file is the runOad function. + * + * In the "breakingChanges directory invocation depth" this file has depth 3, + * i.e. it is invoked by files with depth 2. + */ +import * as oad from "@azure/oad"; +import { OadMessage } from "./types/oad-types.js"; +import { logMessage } from "./log.js"; + +/** + * The runOad() function is a wrapper around the "@azure/oad" library whose source is https://github.com/Azure/openapi-diff. 
+ * + * runOad() is invoked by BreakingChangeDetector.doBreakingChangeDetection(). + * + * runOad() eventually invokes openApiDiff.compare() which calls into openapi-diff repo OpenApiDiff class compare() method [1], + * which obtains semantic model of the old / new (before / after) swagger file specs and then compares them + * via the C# OpenApiDiff.Program.Compare() method [2]. + * + * The OAD messages returned from "@azure/oad" invocation are then slightly post-processed and returned as OadMessage[] collection. + * + * See also: OadMessage type comment. + * + * [1] https://github.com/Azure/openapi-diff/blob/4c158308aca2cfd584e556fe8a05ce6967de2912/src/lib/validators/openApiDiff.ts#L128 + * [2] https://github.com/Azure/openapi-diff/blob/4c158308aca2cfd584e556fe8a05ce6967de2912/openapi-diff/src/core/OpenApiDiff/Program.cs#L40 + */ +export async function runOad( + oldSpec: string, + newSpec: string, + oldTag?: string, + newTag?: string, +): Promise { + logMessage( + `ENTER definition runOad oldSpec: ${oldSpec}, newSpec: ${newSpec}, oldTag: ${oldTag}, newTag: ${newTag}`, + ); + + if ( + oldSpec === null || + oldSpec === undefined || + typeof oldSpec.valueOf() !== "string" || + !oldSpec.trim().length + ) { + throw new Error( + 'oldSpec is a required parameter of type "string" and it cannot be an empty string.', + ); + } + + if ( + newSpec === null || + newSpec === undefined || + typeof newSpec.valueOf() !== "string" || + !newSpec.trim().length + ) { + throw new Error( + 'newSpec is a required parameter of type "string" and it cannot be an empty string.', + ); + } + + let oadCompareOutput: string; + if (oldTag && newTag) { + logMessage("oad.CompareTags() when (oldTag && newTag)"); + + oadCompareOutput = await oad.compareTags(oldSpec, oldTag, newSpec, newTag, { + consoleLogLevel: "warn", + }); + } else { + logMessage("oad.CompareTags() when !(oldTag && newTag)"); + + oadCompareOutput = await oad.compare(oldSpec, newSpec, { consoleLogLevel: 
"warn" }); + } + + logMessage(`oadCompareOutput: ${oadCompareOutput}`); + + // The oadCompareOutput is emitted by this OAD source: + // OpenApiDiff.Program.Main(): + // https://github.com/Azure/openapi-diff/blob/7a3f705224e03de762689eeeb6d4f1b6820dc463/openapi-diff/src/core/OpenApiDiff/Program.cs#L40-L50 + // And each message is of type AutoRest.Swagger.ComparisonMessage: + // https://github.com/Azure/openapi-diff/blob/7a3f705224e03de762689eeeb6d4f1b6820dc463/openapi-diff/src/modeler/AutoRest.Swagger/ComparisonMessage.cs#L17-L17 + // after it was transformed by ComparisonMessage.GetValidationMessagesAsJson(). + const oadMessages: OadMessage[] = JSON.parse(oadCompareOutput); + + logMessage( + `RETURN definition runOad. oadMessages.length: ${oadMessages.length}, ` + + `oldSpec: ${oldSpec}, newSpec: ${newSpec}, oldTag: ${oldTag}, newTag: ${newTag}.`, + ); + + return oadMessages; +} diff --git a/eng/tools/openapi-diff-runner/src/types/breaking-change.ts b/eng/tools/openapi-diff-runner/src/types/breaking-change.ts new file mode 100644 index 000000000000..dae620f15912 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/types/breaking-change.ts @@ -0,0 +1,74 @@ +import { OadMessageProcessorContext } from "../utils/oad-message-processor.js"; +import { PullRequestProperties } from "../utils/pull-request.js"; +import { + VERSIONING_APPROVALS, + BREAKING_CHANGE_APPROVALS, + REVIEW_REQUIRED_LABELS, + BREAKING_CHANGES_CHECK_TYPES, +} from "@azure-tools/specs-shared/breaking-change"; + +/** + * This file contains types used by the OpenAPI specification breaking change checks + * in the Azure/azure-rest-api-specs and Azure/azure-rest-api-specs-pr repositories. 
+ * + * For additional context, see: + * + * - "Deep-dive into breaking changes on spec PRs" + * https://aka.ms/azsdk/pr-brch-deep + * + * - "[Breaking Change][PR Workflow] Use more granular labels for Breaking Changes approvals" + * https://github.com/Azure/azure-sdk-tools/issues/6374 + */ + +// Derive TypeScript types from the JavaScript constants +export type BreakingChangesCheckType = + (typeof BREAKING_CHANGES_CHECK_TYPES)[keyof typeof BREAKING_CHANGES_CHECK_TYPES]; + +// Export the constant values for use in TypeScript +export const BreakingChangeReviewRequiredLabel = REVIEW_REQUIRED_LABELS.BREAKING_CHANGE; +export const VersioningReviewRequiredLabel = REVIEW_REQUIRED_LABELS.VERSIONING; + +// Derive types from constants +export type ReviewRequiredLabel = + (typeof REVIEW_REQUIRED_LABELS)[keyof typeof REVIEW_REQUIRED_LABELS]; +export type ValidVersioningApproval = + (typeof VERSIONING_APPROVALS)[keyof typeof VERSIONING_APPROVALS]; +export type ValidBreakingChangeApproval = + (typeof BREAKING_CHANGE_APPROVALS)[keyof typeof BREAKING_CHANGE_APPROVALS]; +export type ReviewApprovalPrefixLabel = "Versioning-Approved-*" | "BreakingChange-Approved-*"; + +export type SpecsBreakingChangesLabel = + | ReviewRequiredLabel + | ReviewApprovalPrefixLabel + | ValidBreakingChangeApproval + | ValidVersioningApproval; + +/** Corresponds to specs in "*\preview\*" or "*\stable\*" directories in the specs repos. + * Scheduled to replace type SwaggerVersionType and type ComparedApiVersion. + * Read more at https://aka.ms/azsdk/spec-dirs + */ +export type ApiVersionLifecycleStage = "preview" | "stable"; + +/** The name of the log file used by the openapi-diff-runner utility. */ +export const logFileName = "openapi-diff-runner.log"; + +/** + * Represents the input parameters for runner execution. 
+ */ +export interface Context { + localSpecRepoPath: string; + workingFolder: string; + logFileFolder: string; + swaggerDirs: string[]; + baseBranch: string; + headCommit: string; + runType: BreakingChangesCheckType; + checkName: string; + repo: string; // The format is: "owner/repoName" + prNumber: string; + prSourceBranch: string; + prTargetBranch: string; + oadMessageProcessorContext: OadMessageProcessorContext; + prUrl: string; + prInfo?: PullRequestProperties; +} diff --git a/eng/tools/openapi-diff-runner/src/types/message.ts b/eng/tools/openapi-diff-runner/src/types/message.ts new file mode 100644 index 000000000000..95d222b793cf --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/types/message.ts @@ -0,0 +1,109 @@ +/** + * Note this corresponds to Category enum in openapi-diff. + * For details, see comment on breakingChangeShared.ts / OadMessage.type. + */ +export type MessageLevel = "Info" | "Warning" | "Error"; + +// Instances of this type are created e.g. by the function oadMessagesToResultMessageRecords +export type JsonPath = { + // Example values of tag: "old" or "new" + tag: string; + // Example value of path: + // sourceBranchHref(this.context, oadMessage.new.location || ""), + // where this.context is of type PRContext and oadMessage is of type OadMessage + path: string; + // Example value of jsonPath: + // oadMessage.new?.path + // where oadMessage is of type OadMessage + jsonPath?: string | undefined; +}; + +export type MessageContext = { + toolVersion: string; +}; + +export type Extra = { + [key: string]: any; +}; + +export type BaseMessageRecord = { + level: MessageLevel; + message: string; + time: Date; + context?: MessageContext; + group?: string; + extra?: Extra; + groupName?: string; +}; + +/** See comment on type MessageRecord */ +export type ResultMessageRecord = BaseMessageRecord & { + type: "Result"; + id?: string; + code?: string; + docUrl?: string; + paths: JsonPath[]; +}; + +export type RawMessageRecord = BaseMessageRecord & { + 
type: "Raw"; +}; + +export type MarkdownMessageRecord = BaseMessageRecord & { + type: "Markdown"; + mode: "replace" | "append"; + detailMessage?: string; +}; + +/** + * Represents a record of detailed information coming out of one of the validation tools, + * like breaking change detector (OAD) or LintDiff. + * + * MessageRecords end up being printed in the contents of tables in relevant validation tool check in GitHub PR. + * These records are transferred from the Azure DevOps Azure.azure-rest-api-specs-pipeline build runs + * to the GitHub via pipe.log file (pipeline.ts / unifiedPipelineResultFileName) and Azure blob. + * + * The pipe.log gets uploaded to the blob via + * publishResult.ts / resultPublisher.uploadLog + * + * The blob contents are read by the pipeline-bot via + * resultComposer.ts / parseCompleteMessageData + * + * Examples: + * Save message record from OAD to pipe.log: + * doBreakingChangeDetection / appendOadMessages + * + * Save exception thrown by OAD to pipe.log, as MessageLine composed of RawMessageRecord[] + * doBreakingChangeDetection / catch block + * + * For details, see: + * https://dev.azure.com/azure-sdk/internal/_wiki/wikis/internal.wiki/1011/How-the-data-in-breaking-change-GH-check-tables-is-getting-populated + */ +export type MessageRecord = ResultMessageRecord | RawMessageRecord | MarkdownMessageRecord; + +/** + * Represents a message record that is either a result of a validation rule violation or a raw message + * (e.g. an AutoRest exception). + * + * These records originate from "runOad" invocation. 
+ */ +export type BrChMsgRecord = ResultMessageRecord | RawMessageRecord; + +export function getKey(msg: BrChMsgRecord): string { + if (msg.type === "Result") { + return [msg.id, msg.code].filter((s) => s).join(" - "); + } else { + // Example value of msgRecord.message here: "Runtime Exception" + return msg.message; + } +} + +/** + * See type MessageRecord + */ +export type MessageLine = MessageRecord | MessageRecord[]; + +export type FilePosition = { + readonly line: number; + readonly column: number; +}; diff --git a/eng/tools/openapi-diff-runner/src/types/oad-types.ts b/eng/tools/openapi-diff-runner/src/types/oad-types.ts new file mode 100644 index 000000000000..e13cd424cf6f --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/types/oad-types.ts @@ -0,0 +1,183 @@ +/** + * The file breakingChangeShared.ts contains members that: + * - are shared across 2 or more files in the "breakingChanges" folder. + * - AND do not depend on any members from beyond this file, except "@azure/swagger-validation-common". 
+ * - "Deep-dive into breaking changes on spec PRs" + * https://aka.ms/azsdk/pr-brch-deep + * + * - "[Breaking Change][PR Workflow] Use more granular labels for Breaking Changes approvals" + * https://github.com/Azure/azure-sdk-tools/issues/6374 + */ +import { basename } from "path"; +import { getVersionFromInputFile, specificBranchHref } from "../utils/common-utils.js"; +import { MessageLevel } from "./message.js"; +import { sourceBranchHref } from "../utils/common-utils.js"; +import { ApiVersionLifecycleStage, Context } from "./breaking-change.js"; +import { defaultBreakingChangeBaseBranch } from "../command-helpers.js"; +import { readFileSync } from "fs"; +import { fileURLToPath } from "url"; +import { dirname, join } from "path"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +const packageJson = JSON.parse(readFileSync(join(__dirname, "../../../package.json"), "utf-8")); +/** + * A type that represents AutoRest.Swagger.ComparisonMessage from OAD + * after being transformed by ComparisonMessage.GetValidationMessagesAsJson(). + * + * An OadMessage gets converted to ResultMessageRecord by the function oadMessagesToResultMessageRecords. 
+ * + * For origin of data of this type and OAD source links, see comments in: + * runOad.ts + */ +export type OadMessage = { + readonly id: string; + readonly code: OadRuleCode; + readonly docUrl: string; + readonly message: string; + readonly mode: string; // "mode" is assigned from MessageType enum in openapi-diff, which is ["Addition", "Update", Removal"] + readonly type: MessageLevel; // "type" is assigned from openapi-diff's "Severity" , which is openapi-diff's Category enum, which is ["Info", "Warning", "Error"] + readonly new: ChangeProperties; + readonly old: ChangeProperties; + readonly groupName?: ApiVersionLifecycleStage; +}; + +export type ChangeProperties = { + readonly location?: string; + readonly path?: string; + readonly ref?: string; +}; + +/** + * Represents the state of OAD tracing with immutable data structure + */ +export type OadTraceData = { + readonly traces: readonly { + readonly old: string; + readonly new: string; + readonly baseBranch: string; + }[]; + readonly baseBranch: string; + readonly context: Context; +}; + +/** + * Creates a new OAD trace data structure + */ +export const createOadTrace = (context: Context): OadTraceData => ({ + traces: [], + baseBranch: defaultBreakingChangeBaseBranch, + context, +}); + +/** + * Adds a new trace entry to the OAD trace data + */ +export const addOadTrace = ( + traceData: OadTraceData, + oldSwagger: string, + newSwagger: string, +): OadTraceData => + ({ + ...traceData, + traces: [ + ...traceData.traces, + { old: oldSwagger, new: newSwagger, baseBranch: traceData.baseBranch }, + ], + }) as OadTraceData; + +/** + * Sets the base branch for the OAD trace data + */ +export const setOadBaseBranch = (traceData: OadTraceData, branchName: string): OadTraceData => ({ + ...traceData, + baseBranch: branchName, +}); + +/** + * Generates markdown content from OAD trace data + */ +export const generateOadMarkdown = (traceData: OadTraceData): string => { + const oadVersion = 
packageJson.dependencies?.["@azure/oad"]?.replace(/[\^~]/, "") || "unknown"; + if (traceData.traces.length === 0) { + return ""; + } + + // Create properly formatted markdown table without leading whitespace + let content = `| Compared specs ([v${oadVersion}](https://www.npmjs.com/package/@azure/oad/v/${oadVersion})) | new version | base version | +|-------|-------------|--------------| +`; + + for (const value of traceData.traces) { + // Compose each column for clarity + const newFileName = basename(value.new); + const newVersion = getVersionFromInputFile(value.new, true); + + // Truncate commit hash to first 8 characters for better readability + const shortCommit = traceData.context.headCommit.substring(0, 8); + const newCommitLink = `[${shortCommit}](${sourceBranchHref(value.new)})`; + + const oldVersion = getVersionFromInputFile(value.old, true); + const oldCommitLink = `[${value.baseBranch}](${specificBranchHref(value.old, value.baseBranch)})`; + + // Add a row to the markdown table with proper spacing + content += `| ${newFileName} | ${newVersion} ${newCommitLink} | ${oldVersion} ${oldCommitLink} | +`; + } + + return content; +}; + +// Codes correspond to members of openapi-diff ComparisonMessages: +// https://github.com/Azure/openapi-diff/blob/4c158308aca2cfd584e556fe8a05ce6967de2912/openapi-diff/src/modeler/AutoRest.Swagger/ComparisonMessages.cs +export type OadRuleCode = + | "AddedAdditionalProperties" + | "AddedEnumValue" + | "AddedOperation" + | "AddedOptionalProperty" + | "AddedPath" + | "AddedPropertyInResponse" + | "AddedReadOnlyPropertyInResponse" + | "AddedRequiredProperty" + | "AddedXmsEnum" + | "AddingHeader" + | "AddingOptionalParameter" + | "AddingRequiredParameter" + | "AddingResponseCode" + | "ArrayCollectionFormatChanged" + | "ChangedParameterOrder" + | "ConstantStatusHasChanged" + | "ConstraintChanged" + | "ConstraintIsStronger" + | "ConstraintIsWeaker" + | "DefaultValueChanged" + | "DifferentAllOf" + | "DifferentDiscriminator" 
+ | "DifferentExtends" + | "ModifiedOperationId" + | "NoVersionChange" + | "ParameterInHasChanged" + | "ParameterLocationHasChanged" + | "ProtocolNoLongerSupported" + | "ReadonlyPropertyChanged" + | "ReferenceRedirection" + | "RemovedAdditionalProperties" + | "RemovedClientParameter" + | "RemovedDefinition" + | "RemovedEnumValue" + | "RemovedOperation" + | "RemovedOptionalParameter" + | "RemovedPath" + | "RemovedProperty" + | "RemovedRequiredParameter" + | "RemovedResponseCode" + | "RemovedXmsEnum" + | "RemovingHeader" + | "RequestBodyFormatNoLongerSupported" + | "RequiredStatusChange" + | "ResponseBodyFormatNowSupported" + | "TypeChanged" + | "TypeFormatChanged" + | "VersionsReversed" + | "XmsEnumChanged" + | "XmsLongRunningOperationChanged"; diff --git a/eng/tools/openapi-diff-runner/src/utils/apply-rules.ts b/eng/tools/openapi-diff-runner/src/utils/apply-rules.ts new file mode 100644 index 000000000000..9449787c1563 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/apply-rules.ts @@ -0,0 +1,130 @@ +/** + * By design, the only member exported from this file is the applyRules() function. + * + * In the "breakingChanges directory invocation depth" this file has depth 3 + * i.e. it is invoked by files with depth 2. + */ +import { + ApiVersionLifecycleStage, + BreakingChangesCheckType, + ReviewRequiredLabel, + VersioningReviewRequiredLabel, +} from "../types/breaking-change.js"; +import { OadMessage } from "../types/oad-types.js"; +import { BreakingChangeLabelsToBeAdded } from "../command-helpers.js"; + +import { + OadMessageRule, + fallbackLabel, + fallbackRule as fallbackOadMessageRule, + oadMessagesRuleMap, +} from "./oad-rule-map.js"; +import { logMessage, logWarning } from "../log.js"; + +/** + * The function applyRules() applies oadMessagesRuleMap to OAD messages returned by runOad(). + * As a result each OadMessage severity ("type" property) is set and appropriate "review required" labels are + * scheduled to be added. 
In addition, the API version lifecycle stage is added to the OadMessages so + * they e.g. render correctly in user UI. + * + * This function is invoked by the BreakingChangeDetector.doBreakingChangeDetection() + */ +export function applyRules( + oadMessages: OadMessage[], + scenario: BreakingChangesCheckType, + previousApiVersionLifecycleStage: ApiVersionLifecycleStage, +): OadMessage[] { + logMessage("ENTER definition applyRules"); + let outputOadMessages: OadMessage[] = []; + let outputOadMessage: OadMessage; + + for (const oadMessage of oadMessages) { + const rule: OadMessageRule | undefined = oadMessagesRuleMap.find( + (rule) => rule.scenario == scenario && rule.code == oadMessage.code, + ); + + if (rule !== undefined) { + outputOadMessage = applyRule(oadMessage, rule, previousApiVersionLifecycleStage); + } else { + logWarning( + `ASSERTION VIOLATION! No rule found for scenario: '${scenario}', oadMessage: '${JSON.stringify( + oadMessage, + )}'. Using fallback rule: '${JSON.stringify(fallbackOadMessageRule)}'.`, + ); + outputOadMessage = applyRule( + oadMessage, + { ...fallbackOadMessageRule, scenario }, + previousApiVersionLifecycleStage, + ); + } + + // The groupName is used to render the message in appropriate place in GitHub check page. + // See: buildCompletedBreakingChangeCheckResult.ts / getReports() + outputOadMessage = { + ...outputOadMessage, + groupName: previousApiVersionLifecycleStage, + }; + outputOadMessages.push(outputOadMessage); + } + + logMessage("RETURN definition applyRules"); + return outputOadMessages; +} + +function applyRule( + oadMessage: OadMessage, + rule: Omit, + previousApiVersionLifecycleStage: ApiVersionLifecycleStage, +): OadMessage { + const isSameVersionOnPreview = + previousApiVersionLifecycleStage === "preview" && rule.scenario === "SameVersion"; + + // Comparing against previous previews always decreases failure severity from error to warning. 
+ // The fact we set this to true corresponds to the green "Ignore" rectangle for + // ("Cross-version" check / "Previous Preview" row / "Breaking Change" column) + // at https://aka.ms/azsdk/pr-brch-deep#diagram-explaining-breaking-changes-and-versioning-issues + // See also: + // https://github.com/Azure/azure-sdk-tools/issues/6396 + const isCrossVersionAgainstPreviousPreview = + previousApiVersionLifecycleStage === "preview" && rule.scenario === "CrossVersion"; + + const appliedSeverity = + rule.severity === "Error" && isCrossVersionAgainstPreviousPreview ? "Warning" : rule.severity; + + const addLabel = appliedSeverity === "Error"; + + let labelToAdd: ReviewRequiredLabel = fallbackLabel; + if (addLabel) { + if (rule.label == null) { + logWarning( + `ASSERTION VIOLATION! Missing "label" for 'Error' severity rule '${JSON.stringify( + rule, + )}'. Using fallback label '${labelToAdd}'.`, + ); + } else { + labelToAdd = rule.label; + } + + // This is the "Same-version check / Preview" row at: + // https://aka.ms/azsdk/pr-brch-deep#diagram-explaining-breaking-changes-and-versioning-issues + // Specifically, it overrides the "BreakingChangeReviewRequired" label in the "Breaking change" column. + labelToAdd = isSameVersionOnPreview ? VersioningReviewRequiredLabel : labelToAdd; + + // Note: these labels are processed downstream by addBreakingChangeLabelsToBeAdded(). + BreakingChangeLabelsToBeAdded.add(labelToAdd); + } + + // Here by design the oadMessage.type is ignored and overridden by appliedSeverity. 
+ const outputOadMessage: OadMessage = { + ...oadMessage, + type: appliedSeverity, + }; + + logMessage( + `applyRule: addLabel: ${addLabel}, labelToAdd: ${labelToAdd}, rule: '${JSON.stringify( + rule, + )}', outputOadMessage: '${JSON.stringify(outputOadMessage)}'.`, + ); + + return outputOadMessage; +} diff --git a/eng/tools/openapi-diff-runner/src/utils/common-utils.ts b/eng/tools/openapi-diff-runner/src/utils/common-utils.ts new file mode 100644 index 000000000000..37d84cdd771f --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/common-utils.ts @@ -0,0 +1,207 @@ +import { existsSync, readFileSync } from "node:fs"; +import { FilePosition } from "../types/message.js"; +import { logMessage } from "../log.js"; + +export function blobHref(file: unknown): string { + // GitHub Actions scenario + if (process.env.GITHUB_ACTIONS) { + const repoName = process.env.GITHUB_HEAD_REPOSITORY || process.env.GITHUB_REPOSITORY; + const sha = process.env.GITHUB_SHA || process.env.GITHUB_EVENT_PULL_REQUEST_HEAD_SHA; + return `https://github.com/${repoName}/blob/${sha}/${file}`; + } + + // Local development scenario + return `${file}`; +} + +/** + * Get the Github url of targeted swagger file. + * @param file Swagger file starts with "specification" + */ +export function targetHref(file: string) { + return file + ? `https://github.com/${process.env.GITHUB_REPOSITORY}/blob/${getTargetBranch()}/${file}` + : ""; +} + +export function branchHref(file: string, branch: string = "main") { + return file ? `https://github.com/${process.env.GITHUB_REPOSITORY}/blob/${branch}/${file}` : ""; +} + +/** + * Gets the name of the target branch to which the PR is sent. + * If the environment variable is undefined then the method returns 'main' as the default value. + * @returns {string} branchName The target branch name. 
+ */ +export function getTargetBranch(): string { + // For GitHub Actions, use GITHUB_BASE_REF for pull requests, fallback to GITHUB_REF_NAME for direct pushes + const githubBaseRef = process.env["GITHUB_BASE_REF"]; // Target branch for PR + const githubRefName = process.env["GITHUB_REF_NAME"]; // Current branch name + logMessage( + `@@@@@ process.env['GITHUB_BASE_REF'] - ${githubBaseRef}, process.env['GITHUB_REF_NAME'] - ${githubRefName}`, + ); + + // For pull requests, use GITHUB_BASE_REF (target branch) + // For direct pushes, use GITHUB_REF_NAME (current branch) or fallback to "main" + let result = githubBaseRef || githubRefName || "main"; + result = result.trim(); + logMessage(`>>>>> The target branch is: "${result}".`); + return result; +} + +/** + * For breaking change. Trim file path pattern to github style. + * E.g. Input: specification/redis/resource-manager/Microsoft.Cache/preview/2019-07-01/redis.json:191:5 + * Output: specification/redis/resource-manager/Microsoft.Cache/preview/2019-07-01/redis.json#L191:5 + * @param filePath + * + */ +export function getGithubStyleFilePath(filePath: string, filePos?: FilePosition): string { + if (filePos) { + return `${filePath}#L${filePos.line}:${filePos.column}`; + } + const regex = /(:)/; + return filePath.replace(regex, "#L"); +} + +export function getRelativeSwaggerPathToRepo( + filePath: string, + specDirPatterns: string[] = ["specification"], +): string { + const pattern = specDirPatterns.find((f) => filePath.indexOf(f) !== -1); + if (!pattern) { + return filePath.substring(process.env.BUILD_SOURCEDIRECTORY?.length || 1 - 1); + } + const position = filePath.search(pattern); + return filePath.substring(position, filePath.length); +} + +export function sourceBranchHref(file: string, filePos?: FilePosition): string { + return blobHref(getGithubStyleFilePath(getRelativeSwaggerPathToRepo(file), filePos)); +} + +export function targetBranchHref(file: string, filePos?: FilePosition): string { + return 
targetHref(getGithubStyleFilePath(getRelativeSwaggerPathToRepo(file), filePos)); +} + +export function specificBranchHref( + file: string, + branchName: string, + filePos?: FilePosition, +): string { + return branchHref( + getGithubStyleFilePath(getRelativeSwaggerPathToRepo(file), filePos), + branchName, + ); +} + +export function getVersionFromInputFile(filePath: string, withPreview = false): string | undefined { + const apiVersionRegex = /^\d{4}-\d{2}-\d{2}(|-preview|-privatepreview|-alpha|-beta|-rc)$/; + const segments = filePath.split("/"); + if (filePath.indexOf("data-plane") !== -1) { + if (segments && segments.length > 1) { + for (const s of segments.entries()) { + if (["stable", "preview"].some((v) => v === s[1])) { + const version = segments[s[0] + 1]; + if (version) { + return apiVersionRegex.test(version) && !withPreview + ? version.substring(0, 10) + : version; + } + } + } + } + } else { + if (segments && segments.length > 1) { + for (const s of segments) { + if (apiVersionRegex.test(s)) { + return withPreview ? s : s.substring(0, 10); + } + } + } + } + if (existsSync(filePath)) { + return JSON.parse(readFileSync(filePath).toString())?.info?.version; + } + return undefined; +} + +export function getArgumentValue(args: string[], flag: string, defaultValue: string): string { + const index = args.indexOf(flag); + return index !== -1 && index + 1 < args.length && args[index + 1] + ? args[index + 1] + : defaultValue; +} + +/** + * Truncates the input message to a specified maximum size. + * @param msg The message to be truncated. + * @param size The maximum length of the returned string. Defaults to 1024. + * @returns The truncated message, or an empty string if msg is undefined. + */ +export function cutoffMsg(msg: string | undefined, size: number = 1024): string { + if (!msg || msg.length <= size) { + return msg ? msg : ""; + } + return msg.substring(0, size); +} + +/** + * Post-processes the message of an error coming from OAD. 
+ * Notably, the kind of errors that need post-processing is when OAD + * throws a runtime error because it has invoked AutoRest, it threw, + * and OAD has re-thrown it. + * We want to make such errors more readable, which we do here. + * Issue capturing this work and providing more context: + * https://github.com/Azure/azure-sdk-tools/issues/6998 + */ +export function processOadRuntimeErrorMessage( + message: string, + stackTraceMaxLength: number, +): string { + let outputMsg: string = ""; + + // Example "message" string, truncated with cutoffMsg(): + // + // Command failed: node "/mnt/vss/_work/_tasks/AzureApiValidation_5654d05d-82c1-48da-ad8f-161b817f6d41/0.0.59/common/temp/node_modules/.pnpm/https://github.com/Azure+oad@0.10.4/node_modules/autorest/dist/app.js" --v2 --input-file=specification/servicebus/resource-manager/Microsoft.ServiceBus/preview/2023-01-01-preview/namespace-preview.json --output-artifact=swagger-document.json --output-artifact=swagger-document.map --output-file=new --output-folder=/tmp\nERROR: Schema violation: Data does not match any schemas from 'oneOf'\n - file:///mnt/vss/_work/1/azure-rest-api-specs/specification/servicebus/resource-manager/Microsoft.ServiceBus/preview/2023-01-01-preview/namespace-preview.json:347:10 ($.paths["/subscriptions/subscriptionId/resourceGroups/resourceGroupName/providers/Microsoft.ServiceBus/namespaces/namespaceName/failover"].post["x-ms-examples"].NamespaceFailOver)\nFATAL: swagger-document/individual/schema-validator - FAILED\nFATAL: Error: [OperationAbortedException] Error occurred. Exiting.\nProcess() cancelled due to e + // + const oadAutorestInvocationRuntimeError = + message.startsWith("Command failed: node") && message.includes("autorest/dist/app.js"); + + if (oadAutorestInvocationRuntimeError) { + let lines: string[] = message.split(/[\r\n]+/); + + const introLine: string = + `Breaking change detector (OAD) invoked AutoRest. AutoRest threw a runtime error. 
` + + `First ${stackTraceMaxLength} lines of stack trace follow, indexed. ` + + `First line should contain AutoRest command line invocation details. ` + + `Remaining lines should contain the main message reported by AutoRest.`; + + const stackTraceLines: string[] = lines + .filter((line) => line.length > 0) + .slice(0, Math.min(stackTraceMaxLength, lines.length)) + .map((line, i) => `${i + 1}: ${line}`); + + outputMsg = [introLine, "===================="] + .concat(stackTraceLines) + // We join with '
' as this will display correctly as a line break inside + // a cell of a table generated in given GitHub check description. + // This '
' will be interpreted downstream by + // 'msgInterfaceUtils.ts / commonHelper.renderExtra', + // as called by the 'checker.handlebars' template. + .join("
"); + } else { + outputMsg = cutoffMsg(message) || ""; + } + return outputMsg; +} + +/** + * Check if a spec path is a preview version + * @param specPath The specification file path to check. + * @returns {boolean} True if the spec path is a preview version, false otherwise. + */ +export function specIsPreview(specPath: string): boolean { + // Example input value: specification/maps/data-plane/Creator/preview/2022-09-01-preview/wayfind.json + return specPath.includes("/preview/") && !specPath.includes("/stable/"); +} diff --git a/eng/tools/openapi-diff-runner/src/utils/markdown-report-row.ts b/eng/tools/openapi-diff-runner/src/utils/markdown-report-row.ts new file mode 100644 index 000000000000..2ee70abd833e --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/markdown-report-row.ts @@ -0,0 +1,137 @@ +import { BrChMsgRecord, JsonPath, ResultMessageRecord } from "../types/message.js"; + +/** + * Represents a single row in the markdown table for GitHub check pane + */ +export interface BreakingChangeMdRow { + readonly msg: BrChMsgRecord; + readonly index: number; + readonly description: string; +} + +/** + * Creates markdown rows from breaking change messages + */ +export function createBreakingChangeMdRows(msgs: BrChMsgRecord[]): BreakingChangeMdRow[] { + const rows = msgs.map((msg, index) => ({ + msg, + index: index + 1, + description: getDescriptionColumn(msg), + })); + + return sortBreakingChangeMdRows(rows); +} + +/** + * Gets the markdown table header + */ +export function getMdTableHeader(): string { + return "| Index | Description |\n|-|-|\n"; +} + +/** + * Creates a deficit row for omitted occurrences + */ +export function getDeficitRow(deficit: number): string { + return `|| ⚠️ ${deficit} occurrence${deficit > 1 ? "s" : ""} omitted. 
See the build log.|\n`; +} + +/** + * Converts a row to markdown table row string + */ +export function rowToString(row: BreakingChangeMdRow): string { + return `| ${row.index} | ${row.description} |\n`; +} + +/** + * Gets the description column content for a message + */ +function getDescriptionColumn(msg: BrChMsgRecord): string { + if (msg.type === "Result") { + return getDescription(msg); + } else { + return getExtra(msg); + } +} + +/** + * Gets the full description including message and location + */ +function getDescription(msg: BrChMsgRecord): string { + return `${getMessage(msg)}
${getLocation(msg)}`; +} + +/** + * Gets the cleaned message text + */ +function getMessage(msg: BrChMsgRecord): string { + const re = /(\n|\t|\r)/gi; + return msg.message.replace(re, " "); +} + +/** + * Gets the location information for the message + */ +function getLocation(msg: BrChMsgRecord): string { + const paths: JsonPath[] = (msg as ResultMessageRecord).paths; + return paths + .filter((p) => p.path) + .map( + (p) => + `${p.tag}: [${getPathSegment(p.path)}](${p.path})${getJsonPathForNewOrOldTag(paths, p)}`, + ) + .join("
"); +} + +/** + * Gets the last 4 segments of a path + */ +function getPathSegment(path: string): string { + return path.split("/").slice(-4).join("/"); +} + +/** + * Gets the JSON path for new or old tags based on availability + */ +function getJsonPathForNewOrOldTag(allPaths: JsonPath[], targetPath: JsonPath): string { + const newPath = allPaths.find((p) => p.tag === "New"); + const newJsonPath = newPath?.jsonPath; + const newJsonPathPresent = newJsonPath != null && newJsonPath !== ""; + + const oldPath = allPaths.find((p) => p.tag === "Old"); + const oldJsonPath = oldPath?.jsonPath; + const oldJsonPathPresent = oldJsonPath != null && oldJsonPath !== ""; + + if (targetPath.tag === "New" && newJsonPathPresent) { + return prettyPrintJsonPath(newJsonPath); + } + + if (targetPath.tag === "Old" && !newJsonPathPresent && oldJsonPathPresent) { + return prettyPrintJsonPath(oldJsonPath); + } + + return ""; +} + +/** + * Pretty prints a JSON path with HTML formatting + */ +function prettyPrintJsonPath(jsonPath: string): string { + return `
${jsonPath}`; +} + +/** + * Gets the extra information for non-Result messages + */ +function getExtra(msg: BrChMsgRecord): string { + return JSON.stringify(msg.extra || {}) + .replace(/[{}]/g, "") + .replace(/,/g, ",
"); +} + +/** + * Sorts breaking change markdown rows by description + */ +function sortBreakingChangeMdRows(rows: BreakingChangeMdRow[]): BreakingChangeMdRow[] { + return rows.sort((row1, row2) => row1.description.localeCompare(row2.description)); +} diff --git a/eng/tools/openapi-diff-runner/src/utils/markdown-report.ts b/eng/tools/openapi-diff-runner/src/utils/markdown-report.ts new file mode 100644 index 000000000000..8679b3c13eeb --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/markdown-report.ts @@ -0,0 +1,232 @@ +import { + createBreakingChangeMdRows, + getMdTableHeader, + getDeficitRow, + rowToString, +} from "./markdown-report-row.js"; +import { BrChMsgRecord, MessageLevel, ResultMessageRecord, getKey } from "../types/message.js"; +import { logMessage, LogLevel } from "../log.js"; + +/** + * Represents a markdown report for breaking change violations + */ +export interface BreakingChangeMdReport { + readonly msgs: BrChMsgRecord[]; + readonly rows: string[]; + readonly type: BrChMsgRecord["type"]; + readonly level: MessageLevel; + readonly id?: string; + readonly rawMessage: string; +} + +/** + * Type mappings for sorting + */ +const TYPE_ORDER_MAP: Record = { + Raw: 0, + Result: 1, +}; + +const LEVEL_ORDER_MAP: Record = { + Info: 0, + Warning: 1, + Error: 2, +}; + +/** + * Creates a breaking change markdown report + */ +export function createBreakingChangeMdReport(msgs: BrChMsgRecord[]): BreakingChangeMdReport { + // Validation + validateMessages(msgs); + + const rows = buildRows(msgs); + const firstMsg = msgs[0]; + + return { + msgs, + rows, + type: firstMsg.type, + level: firstMsg.level, + id: firstMsg.type === "Result" ? firstMsg.id : undefined, + rawMessage: firstMsg.type === "Raw" ? 
firstMsg.message : "", + }; +} + +/** + * Sorts breaking change reports according to specified criteria + */ +export function sortBreakingChangeMdReports( + reports: BreakingChangeMdReport[], +): BreakingChangeMdReport[] { + return reports.sort((rep1, rep2) => { + const typeCompare = compareByType(rep1, rep2); + if (typeCompare !== 0) return typeCompare; + + const levelCompare = compareByLevel(rep1, rep2); + if (levelCompare !== 0) return -levelCompare; + + const idCompare = compareById(rep1, rep2); + if (idCompare !== 0) return idCompare; + + return 0; + }); +} + +/** + * Gets the length of the report when rendered as string + */ +export function getReportLength(report: BreakingChangeMdReport, maxRowCount: number): number { + return reportToString(report, maxRowCount).length; +} + +/** + * Converts a report to markdown string + */ +export function reportToString(report: BreakingChangeMdReport, maxRowCount: number): string { + return ( + getPreamble(report, maxRowCount) + getMdTableHeader() + getRows(report, maxRowCount).join("") + ); +} + +/** + * Gets the number of rows in the report + */ +export function getRowCount(report: BreakingChangeMdReport): number { + return report.rows.length; +} + +/** + * Validates the input messages + */ +function validateMessages(msgs: BrChMsgRecord[]): void { + if (msgs.length === 0) { + logMessage(`BreakingChangeMdReport: ASSERTION VIOLATION! msgs are of length 0.`, LogLevel.Warn); + } + + if (msgs.some((msg) => msg.type !== msgs[0].type)) { + logMessage( + `BreakingChangeMdReport: ASSERTION VIOLATION! Not all messages have the same type. msgs[0].type = ${msgs[0].type}.`, + LogLevel.Warn, + ); + } + + if (msgs.some((msg) => msg.groupName !== msgs[0].groupName)) { + logMessage( + `BreakingChangeMdReport: ASSERTION VIOLATION! Not all messages have the same groupName. 
msgs[0].groupName = '${msgs[0].groupName}'.`, + LogLevel.Warn, + ); + } +} + +/** + * Builds the markdown table rows + */ +function buildRows(msgs: BrChMsgRecord[]): string[] { + return createBreakingChangeMdRows(msgs).map(rowToString); +} + +/** + * Gets the preamble text for the report + */ +function getPreamble(report: BreakingChangeMdReport, maxRowCount: number): string { + const ruleColumn = getRuleColumn(report.msgs[0]); + const mainMsg = `## ${ruleColumn}\nDisplaying ${Math.min(maxRowCount, report.msgs.length)} out of ${report.msgs.length} occurrences.\n`; + const moreRowsMsg = + report.msgs.length > maxRowCount + ? `⚠️ To view the remaining ${report.msgs.length - maxRowCount} occurrences, see the build log.\n` + : ""; + return mainMsg + moreRowsMsg; +} + +/** + * Gets the rows to display, including deficit row if needed + */ +function getRows(report: BreakingChangeMdReport, maxRowCount: number): string[] { + let rows = report.rows.slice(0, maxRowCount); + const deficit = report.msgs.length - maxRowCount; + if (deficit > 0) { + rows = rows.concat([getDeficitRow(deficit)]); + } + return rows; +} + +/** + * Gets the rule column text + */ +function getRuleColumn(msg: BrChMsgRecord): string { + if (msg.type === "Result") { + return getRuleName(msg); + } else { + return `${getMark(msg)} ${msg.message}`; + } +} + +/** + * Gets the rule name with link + */ +function getRuleName(msg: ResultMessageRecord): string { + return `${getMark(msg)} [${getKey(msg)}](${msg.docUrl})`; +} + +/** + * Gets the emoji mark for the message level + */ +function getMark(msgRecord: BrChMsgRecord): string { + switch (msgRecord.level) { + case "Error": + return ""; + case "Info": + return ":speech_balloon:"; + case "Warning": + return ":warning:"; + } +} + +/** + * Compares reports by type + */ +function compareByType(rep1: BreakingChangeMdReport, rep2: BreakingChangeMdReport): number { + return TYPE_ORDER_MAP[rep1.type] - TYPE_ORDER_MAP[rep2.type]; +} + +/** + * Compares reports by 
message level + */ +function compareByLevel(rep1: BreakingChangeMdReport, rep2: BreakingChangeMdReport): number { + return LEVEL_ORDER_MAP[rep1.level] - LEVEL_ORDER_MAP[rep2.level]; +} + +/** + * Compares reports by ID or message + */ +function compareById(rep1: BreakingChangeMdReport, rep2: BreakingChangeMdReport): number { + if (rep1.type === "Result" && rep2.type === "Result") { + const thisId = getIntFromId(rep1.id); + const otherId = getIntFromId(rep2.id); + if (typeof thisId === "number" && typeof otherId === "number") { + return thisId - otherId; + } + return getStrFromId(rep1.id).localeCompare(getStrFromId(rep2.id)); + } + if (rep1.type === "Raw" && rep2.type === "Raw") { + return rep1.rawMessage.localeCompare(rep2.rawMessage); + } + return 0; +} + +/** + * Converts ID string to number if possible + */ +function getIntFromId(id?: string): number | undefined { + const idStr = getStrFromId(id); + const idInt = parseInt(idStr); + return isNaN(idInt) ? undefined : idInt; +} + +/** + * Gets string representation of ID + */ +function getStrFromId(id?: string): string { + return id ?? 
""; +} diff --git a/eng/tools/openapi-diff-runner/src/utils/oad-message-processor.ts b/eng/tools/openapi-diff-runner/src/utils/oad-message-processor.ts new file mode 100644 index 000000000000..82810ba10092 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/oad-message-processor.ts @@ -0,0 +1,188 @@ +import path from "path"; +import fs from "fs"; +import { OadMessage } from "../types/oad-types.js"; +import { JsonPath, MessageLevel, ResultMessageRecord } from "../types/message.js"; +import { sourceBranchHref, specificBranchHref } from "./common-utils.js"; +import { logFileName } from "../types/breaking-change.js"; +import { defaultBreakingChangeBaseBranch as defaultBaseBranch } from "../command-helpers.js"; +import { logMessage } from "../log.js"; + +/** + * Context for OAD message processing operations + */ +export interface OadMessageProcessorContext { + logFilePath: string; + prUrl: string; + messageCache: OadMessage[]; +} + +/** + * Convert OAD messages to result message records + */ +export function convertOadMessagesToResultMessageRecords( + messages: OadMessage[], + baseBranchName: string | null = null, +): ResultMessageRecord[] { + return messages.map((oadMessage) => { + // These paths will be printed out to GitHub check pane table row by invocations to + // BreakingChangeMdRow.ts / getLocation + const paths: JsonPath[] = []; + if (oadMessage.new.location) { + paths.push({ + tag: "New", + path: sourceBranchHref(oadMessage.new.location || ""), + jsonPath: oadMessage.new?.path, + }); + } + if (oadMessage.old.location) { + paths.push({ + tag: "Old", + path: specificBranchHref( + oadMessage.old.location || "", + baseBranchName || defaultBaseBranch, + ), + jsonPath: oadMessage.old?.path, + }); + } + return { + type: "Result", + level: oadMessage.type as MessageLevel, + message: oadMessage.message, + code: oadMessage.code, + id: oadMessage.id, + docUrl: oadMessage.docUrl, + time: new Date(), + groupName: oadMessage.groupName, + extra: { + mode: 
oadMessage.mode, + }, + paths: paths, + } as ResultMessageRecord; + }); +} + +/** + * Create a new OAD message processor context + */ +export function createOadMessageProcessor( + logFileFolder: string, + prUrl: string, +): OadMessageProcessorContext { + const logFilePath = path.join(logFileFolder || ".", logFileName); + + // Remove the log file if it exists + if (fs.existsSync(logFilePath)) { + fs.unlinkSync(logFilePath); + } + + // Create the log file explicitly + fs.writeFileSync(logFilePath, ""); + + return { + logFilePath, + prUrl, + messageCache: [], + }; +} + +/** + * Create a deterministic key for an OAD message to enable deduplication + */ +export function createMessageKey(message: OadMessage): string { + // Create a deterministic key based on message properties that define uniqueness + // Adjust these properties based on what makes an OadMessage unique + return JSON.stringify({ + code: message.code, + message: message.message, + type: message.type, + mode: message.mode, + newLocation: message.new?.location, + oldLocation: message.old?.location, + newPath: message.new?.path, + oldPath: message.old?.path, + // Add other properties that determine uniqueness + }); +} + +/** + * Append a message to the log file + */ +export function appendToLogFile(logFilePath: string, msg: string): void { + fs.appendFileSync(logFilePath, msg); + fs.appendFileSync(logFilePath, "\n"); + logMessage("oad-message-processor.appendMsg: " + msg); +} + +/** + * Append markdown content to the log file + */ +export function appendMarkdownToLog( + context: OadMessageProcessorContext, + errorMsg: string, + levelType = "Error", +): void { + const markdownRecord = JSON.stringify({ + type: "Markdown", + mode: "append", + level: levelType, + message: errorMsg, + time: new Date(), + }); + appendToLogFile(context.logFilePath, markdownRecord); +} + +/** + * Process and deduplicate OAD messages, then append to log + * This function is invoked by BreakingChangeDetector.doBreakingChangeDetection() 
+ */ +export function processAndAppendOadMessages( + context: OadMessageProcessorContext, + oadMessages: OadMessage[], + baseBranch: string, +): ResultMessageRecord[] { + // Use Set for O(1) lookup instead of O(n) array operations + const cacheKeys = new Set(context.messageCache.map((msg) => createMessageKey(msg))); + + // Filter out duplicates - O(n) instead of O(n²) + const dedupedOadMessages = oadMessages.filter((oadMessage) => { + const key = createMessageKey(oadMessage); + return !cacheKeys.has(key); + }); + + // Count duplicates for logging + const duplicateCount = oadMessages.length - dedupedOadMessages.length; + // We are using this log as a metric to track and measure impact of the work on improving "breaking changes" tooling. + // Log statement added around 12/5/2023. + // See: https://github.com/Azure/azure-sdk-tools/issues/7223#issuecomment-1839830834 + // TODO output PR information. + logMessage( + `oad-message-processor.processAndAppendOadMessages: PR:${context.prUrl}, baseBranch: ${baseBranch}, ` + + `oadMessages.length: ${oadMessages.length}, duplicateOadMessages.length: ${duplicateCount}, ` + + `messageCache.length: ${context.messageCache.length}.`, + ); + + context.messageCache.push(...dedupedOadMessages); + + const msgs: ResultMessageRecord[] = convertOadMessagesToResultMessageRecords( + dedupedOadMessages, + baseBranch, + ); + + appendToLogFile(context.logFilePath, JSON.stringify(msgs)); + + return msgs; +} + +/** + * Clear the message cache for testing purposes + */ +export function clearMessageCache(context: OadMessageProcessorContext): void { + context.messageCache = []; +} + +/** + * Get the current message cache size + */ +export function getMessageCacheSize(context: OadMessageProcessorContext): number { + return context.messageCache.length; +} diff --git a/eng/tools/openapi-diff-runner/src/utils/oad-rule-map.ts b/eng/tools/openapi-diff-runner/src/utils/oad-rule-map.ts new file mode 100644 index 000000000000..d1ddd9caa14d --- 
/dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/oad-rule-map.ts @@ -0,0 +1,148 @@ +import { MessageLevel } from "../types/message.js"; +import { OadRuleCode } from "../types/oad-types.js"; +import { BreakingChangesCheckType, ReviewRequiredLabel } from "../types/breaking-change.js"; + +/** + * This oadMessagesRuleMap is applied by applyRules() function, invoked by BreakingChangeDetector, + * to messages returned from OAD (i.e. from a call to runOad() function). + * + * The oadMessagesRuleMap is expected to: + * (a) have entry for every possible combination of (scenario, code). + * (b) have "label" defined if severity is "Error". + * + * If (a) is violated, then const fallbackRule will be used instead. + * If (b) is violated, then const fallbackLabel will be used instead. + */ +// prettier-ignore +export const oadMessagesRuleMap: OadMessagesRuleMap = [ + { scenario: "CrossVersion" , code: "AddedAdditionalProperties" , severity: "Info" }, + { scenario: "CrossVersion" , code: "AddedEnumValue" , severity: "Info" }, + { scenario: "CrossVersion" , code: "AddedOperation" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "AddedOptionalProperty" , severity: "Info" }, + { scenario: "CrossVersion" , code: "AddedPath" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "AddedPropertyInResponse" , severity: "Info" }, + { scenario: "CrossVersion" , code: "AddedReadOnlyPropertyInResponse" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "AddedRequiredProperty" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "AddedXmsEnum" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "AddingHeader" , severity: "Info" }, + { scenario: "CrossVersion" , code: "AddingOptionalParameter" , severity: "Info" }, + { scenario: "CrossVersion" , 
code: "AddingRequiredParameter" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "AddingResponseCode" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ArrayCollectionFormatChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ChangedParameterOrder" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ConstantStatusHasChanged" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ConstraintChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ConstraintIsStronger" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ConstraintIsWeaker" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "DefaultValueChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "DifferentAllOf" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "DifferentDiscriminator" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "DifferentExtends" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ModifiedOperationId" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "NoVersionChange" , severity: "Info" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ParameterInHasChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ParameterLocationHasChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: 
"ProtocolNoLongerSupported" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ReadonlyPropertyChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ReferenceRedirection" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedAdditionalProperties" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedClientParameter" , severity: "Info" , label: "BreakingChangeReviewRequired" }, // [1] + { scenario: "CrossVersion" , code: "RemovedDefinition" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedEnumValue" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedOperation" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedOptionalParameter" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedPath" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedProperty" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedRequiredParameter" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovedResponseCode" , severity: "Info" }, + { scenario: "CrossVersion" , code: "RemovedXmsEnum" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RemovingHeader" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RequestBodyFormatNoLongerSupported" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "RequiredStatusChange" , severity: "Error" , label: 
"BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "ResponseBodyFormatNowSupported" , severity: "Info" }, + { scenario: "CrossVersion" , code: "TypeChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "TypeFormatChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "VersionsReversed" , severity: "Warning" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "XmsEnumChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "CrossVersion" , code: "XmsLongRunningOperationChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "AddedAdditionalProperties" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "AddedEnumValue" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "AddedOperation" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "AddedOptionalProperty" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "AddedPath" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "AddedPropertyInResponse" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "AddedReadOnlyPropertyInResponse" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "AddedRequiredProperty" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "AddedXmsEnum" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "AddingHeader" , severity: "Info" }, + { scenario: "SameVersion" , code: "AddingOptionalParameter" , severity: "Error" , label: "VersioningReviewRequired" }, + { 
scenario: "SameVersion" , code: "AddingRequiredParameter" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "AddingResponseCode" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ArrayCollectionFormatChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ChangedParameterOrder" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ConstantStatusHasChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ConstraintChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ConstraintIsStronger" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ConstraintIsWeaker" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "DefaultValueChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "DifferentAllOf" , severity: "Warning" }, + { scenario: "SameVersion" , code: "DifferentDiscriminator" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "DifferentExtends" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ModifiedOperationId" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "NoVersionChange" , severity: "Info" }, + { scenario: "SameVersion" , code: "ParameterInHasChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ParameterLocationHasChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ProtocolNoLongerSupported" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + 
{ scenario: "SameVersion" , code: "ReadonlyPropertyChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "ReferenceRedirection" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedAdditionalProperties" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedClientParameter" , severity: "Info" , label: "BreakingChangeReviewRequired" }, // [1] + { scenario: "SameVersion" , code: "RemovedDefinition" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedEnumValue" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedOperation" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedOptionalParameter" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedPath" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedProperty" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedRequiredParameter" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedResponseCode" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "RemovedXmsEnum" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RemovingHeader" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RequestBodyFormatNoLongerSupported" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "RequiredStatusChange" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: 
"ResponseBodyFormatNowSupported" , severity: "Error" , label: "VersioningReviewRequired" }, + { scenario: "SameVersion" , code: "TypeChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "TypeFormatChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "VersionsReversed" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "XmsEnumChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, + { scenario: "SameVersion" , code: "XmsLongRunningOperationChanged" , severity: "Error" , label: "BreakingChangeReviewRequired" }, +]; + +/** + * See comment on oadMessagesRuleMap. + */ +export const fallbackLabel: ReviewRequiredLabel = "BreakingChangeReviewRequired"; + +/** + * See comment on oadMessagesRuleMap. + */ +export const fallbackRule: Omit = { + severity: "Error", + label: fallbackLabel, +}; + +// [1]: Reduced RemovedClientParameter severity from "Error" to "Info" per https://github.com/Azure/azure-sdk-tools/issues/5025 + +/** + * Type of entry of oadMessagesRuleMap. + */ +export type OadMessageRule = { + scenario: BreakingChangesCheckType; + code: OadRuleCode; + severity: MessageLevel; + label?: ReviewRequiredLabel; +}; + +/** + * See comment on oadMessagesRuleMap. + */ +export type OadMessagesRuleMap = OadMessageRule[]; diff --git a/eng/tools/openapi-diff-runner/src/utils/pull-request.ts b/eng/tools/openapi-diff-runner/src/utils/pull-request.ts new file mode 100644 index 000000000000..47a786d5ecd0 --- /dev/null +++ b/eng/tools/openapi-diff-runner/src/utils/pull-request.ts @@ -0,0 +1,152 @@ +import { existsSync, mkdirSync } from "node:fs"; +import path from "node:path"; +import { simpleGit } from "simple-git"; +import { Context } from "../types/breaking-change.js"; +import { logError, logMessage } from "../log.js"; + +/** + * Properties of Pull Request in Azure DevOps CI. 
+ */ +export type PullRequestProperties = { + /** + * Target Branch, for example `main`. + */ + readonly targetBranch: string; + + /** + * Source Branch, for example `myname/newchanges`. + */ + readonly sourceBranch: string; + + /** + * Base Branch for breaking change detection, for example `main`. + */ + baseBranch: string; + + /** + * the PR repo current branch. + */ + currentBranch: string; + + /** + * Working folder for a cloned directory. We can't switch branches in the original Git repository + * so we use cloned repository. + */ + readonly workingDir: string; + + /** + * Checkout Git branch, for example, it can be `targetBranch` or `sourceBranch`. + */ + readonly checkout: (branch: string) => Promise; +}; + +const sourceBranch = "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"; + +const options = { + baseDir: process.cwd(), + binary: "git", + maxConcurrentProcesses: 1, +}; + +/** + * It creates a clone of the Git repository and returns properties of the Pull Request, such as + * `targetBranch` and `sourceBranch`. + * the `cwd` should point to the source Git repository. + */ +export const createPullRequestProperties = async ( + context: Context, + prefix: string, + skipInitializeBase: boolean = false, +): Promise => { + const baseBranch = context.baseBranch; + if (baseBranch === undefined) { + return undefined; + } + const originGitRepository = simpleGit({ ...options, baseDir: context.localSpecRepoPath }); + // Helper function to safely create a branch if it doesn't exist + const createBranchIfNotExists = async (branchName: string, startPoint?: string) => { + try { + // Get fresh branch list to avoid stale data + const currentBranches = await originGitRepository.branch(); + if (!currentBranches.all.includes(branchName)) { + const branchArgs = startPoint ? [branchName, startPoint] : [branchName]; + await originGitRepository.branch(branchArgs); + logMessage(`Created branch ${branchName}${startPoint ? 
` from ${startPoint}` : ""}`); + } else { + logMessage(`Branch ${branchName} already exists, skipping creation`); + } + } catch (error: any) { + // If the error is about branch already existing, that's fine - continue + if ( + error.message?.includes("already exists") || + error.message?.includes("fatal: a branch named") + ) { + logMessage(`Branch ${branchName} already exists (caught during creation), continuing`); + } else { + logError(`Failed to create branch ${branchName}: ${error.message}`); + throw error; + } + } + }; + + // Create branches if they don't exist + await createBranchIfNotExists(sourceBranch); + + if (!skipInitializeBase) { + await createBranchIfNotExists(baseBranch, `remotes/origin/${baseBranch}`); + } + + await createBranchIfNotExists(context.prTargetBranch, `remotes/origin/${context.prTargetBranch}`); + + // we have to clone the repository because we need to switch branches. + // Switching branches in the current repository can be dangerous because Avocado + // may be running from it. 
+ const workingDir = path.resolve( + path.join(process.cwd(), "..", `${prefix}-c93b354fd9c14905bb574a8834c4d69b`), + ); + if (!existsSync(workingDir)) { + mkdirSync(workingDir); + } + const workingGitRepository = simpleGit({ ...options, baseDir: workingDir }); + await workingGitRepository.init(); + + // Check if origin remote already exists, if not add it + try { + const remotes = await workingGitRepository.getRemotes(); + const originExists = remotes.some((remote: any) => remote.name === "origin"); + if (!originExists) { + await workingGitRepository.addRemote("origin", context.localSpecRepoPath); + } + } catch (error) { + // If getting remotes fails, try to add origin anyway and catch the error + try { + await workingGitRepository.addRemote("origin", context.localSpecRepoPath); + } catch (addRemoteError: any) { + // Ignore the error if remote already exists + if (!addRemoteError?.message?.includes("remote origin already exists")) { + throw addRemoteError; + } + } + } + await workingGitRepository.pull("origin", context.prTargetBranch); + await workingGitRepository.fetch("origin", `${sourceBranch}`); + if (!skipInitializeBase) { + await workingGitRepository.fetch("origin", `${baseBranch}`); + } + await workingGitRepository.checkout(context.prTargetBranch); + + return { + baseBranch: context.prTargetBranch, + targetBranch: context.prTargetBranch, + sourceBranch, + workingDir, + checkout: async function (this: any, branch: string) { + if (this.currentBranch !== branch) { + await workingGitRepository.checkout([branch]); + logMessage(`checkout to ${branch} in ${workingDir}`); + this.currentBranch = branch; + } + }, + currentBranch: context.prTargetBranch, + }; +}; diff --git a/eng/tools/openapi-diff-runner/test/command-helpers.test.ts b/eng/tools/openapi-diff-runner/test/command-helpers.test.ts new file mode 100644 index 000000000000..3f949ba39c3c --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/command-helpers.test.ts @@ -0,0 +1,964 @@ +import { describe, it, 
expect, vi, beforeEach, afterEach } from "vitest"; +import { existsSync, mkdirSync, readFileSync, writeFileSync, rmSync } from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import { + initContext, + BreakingChangeLabelsToBeAdded, + getSwaggerDiffs, + buildPrInfo, + changeBaseBranch, + logFullOadMessagesList, + createDummySwagger, + cleanDummySwagger, + isSameVersionBreakingType, + getCreatedDummySwaggerCount, + outputBreakingChangeLabelVariables, +} from "../src/command-helpers.js"; +import { + Context, + BreakingChangeReviewRequiredLabel, + VersioningReviewRequiredLabel, +} from "../src/types/breaking-change.js"; +import { ResultMessageRecord } from "../src/types/message.js"; +import { getChangedFilesStatuses } from "@azure-tools/specs-shared/changed-files"; + +// Mock dependencies +vi.mock("node:fs"); +vi.mock("node:path"); +vi.mock("node:url"); +vi.mock("../src/utils/common-utils.js"); +vi.mock("../src/utils/oad-message-processor.js"); +vi.mock("../src/utils/pull-request.js"); +vi.mock("../src/log.js"); +vi.mock("@azure-tools/specs-shared/changed-files", async () => { + const actual = await vi.importActual("@azure-tools/specs-shared/changed-files"); + return { + ...actual, + getChangedFilesStatuses: vi.fn(), + }; +}); + +describe("command-helpers", () => { + const mockExistsSync = vi.mocked(existsSync); + const mockMkdirSync = vi.mocked(mkdirSync); + const mockReadFileSync = vi.mocked(readFileSync); + const mockWriteFileSync = vi.mocked(writeFileSync); + const mockRmSync = vi.mocked(rmSync); + const mockPath = vi.mocked(path); + const mockFileURLToPath = vi.mocked(fileURLToPath); + const mockGetChangedFilesStatuses = vi.mocked(getChangedFilesStatuses); + + beforeEach(() => { + vi.clearAllMocks(); + + // Setup default mocks + mockPath.resolve.mockImplementation((...paths) => paths.join("/")); + mockPath.join.mockImplementation((...paths) => paths.join("/")); + mockPath.dirname.mockImplementation((p) => p.split("/").slice(0, 
-1).join("/")); + mockFileURLToPath.mockReturnValue("/path/to/file.js"); + + // Mock console methods to avoid test output noise + vi.spyOn(console, "log").mockImplementation(() => {}); + vi.spyOn(console, "error").mockImplementation(() => {}); + + // Clear BreakingChangeLabelsToBeAdded set + BreakingChangeLabelsToBeAdded.clear(); + + // Clean up any dummy swagger files between tests + cleanDummySwagger(); + }); + + afterEach(() => { + vi.resetAllMocks(); + }); + + describe("initContext", () => { + beforeEach(() => { + // Mock process.argv + vi.stubGlobal("process", { + ...process, + argv: ["node", "script.js", "--repo=test/repo", "--number=123"], + }); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + }); + + it("should initialize context with default values", async () => { + const { getArgumentValue } = await import("../src/utils/common-utils.js"); + const { createOadMessageProcessor } = await import("../src/utils/oad-message-processor.js"); + + vi.mocked(getArgumentValue).mockImplementation((_args, key, defaultValue) => { + const argMap: Record = { + "--repo": "test/repo", + "--number": "123", + "--rt": "SameVersion", + "--bb": "main", + "--hc": "HEAD", + "--sb": "", + "--tb": "", + }; + return argMap[key] || defaultValue || ""; + }); + + mockExistsSync.mockReturnValue(false); + vi.mocked(createOadMessageProcessor).mockReturnValue({ + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }); + + const context = initContext(); + + expect(context.repo).toBe("test/repo"); + expect(context.prNumber).toBe("123"); + expect(context.runType).toBe("SameVersion"); + expect(context.baseBranch).toBe("main"); + expect(context.headCommit).toBe("HEAD"); + expect(context.checkName).toBe("Swagger BreakingChange"); + expect(context.prUrl).toBe("https://github.com/test/repo/pull/123"); + expect(mockMkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true }); + }); + + it("should use custom 
values when provided", async () => { + const { getArgumentValue } = await import("../src/utils/common-utils.js"); + const { createOadMessageProcessor } = await import("../src/utils/oad-message-processor.js"); + + vi.mocked(getArgumentValue).mockImplementation((_args, key, defaultValue) => { + const argMap: Record = { + "--repo": "custom/repo", + "--number": "456", + "--rt": "CrossVersion", + "--bb": "develop", + "--hc": "abc123", + "--sb": "feature-branch", + "--tb": "main", + }; + return argMap[key] || defaultValue || ""; + }); + + mockExistsSync.mockReturnValue(true); + vi.mocked(createOadMessageProcessor).mockReturnValue({ + logFilePath: "/log/path", + prUrl: "https://github.com/custom/repo/pull/456", + messageCache: [], + }); + + const context = initContext(); + + expect(context.repo).toBe("custom/repo"); + expect(context.prNumber).toBe("456"); + expect(context.runType).toBe("CrossVersion"); + expect(context.baseBranch).toBe("develop"); + expect(context.headCommit).toBe("abc123"); + expect(context.prSourceBranch).toBe("feature-branch"); + expect(context.prTargetBranch).toBe("main"); + expect(context.checkName).toBe("BreakingChange(Cross-Version)"); + expect(mockMkdirSync).not.toHaveBeenCalled(); + }); + + it("should create log file folder if it doesn't exist", async () => { + const { getArgumentValue } = await import("../src/utils/common-utils.js"); + const { createOadMessageProcessor } = await import("../src/utils/oad-message-processor.js"); + + vi.mocked(getArgumentValue).mockReturnValue(""); + mockExistsSync.mockReturnValue(false); + vi.mocked(createOadMessageProcessor).mockReturnValue({ + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }); + + initContext(); + + expect(mockMkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true }); + }); + }); + + describe("BreakingChangeLabelsToBeAdded", () => { + it("should be a Set that can be modified", () => { + 
expect(BreakingChangeLabelsToBeAdded).toBeInstanceOf(Set); + expect(BreakingChangeLabelsToBeAdded.size).toBe(0); + + BreakingChangeLabelsToBeAdded.add("test-label"); + expect(BreakingChangeLabelsToBeAdded.has("test-label")).toBe(true); + expect(BreakingChangeLabelsToBeAdded.size).toBe(1); + + BreakingChangeLabelsToBeAdded.clear(); + expect(BreakingChangeLabelsToBeAdded.size).toBe(0); + }); + }); + + describe("getSwaggerDiffs", () => { + it("should return changed files successfully", async () => { + const mockResult = { + additions: [ + "specification/foo/resource-manager/Microsoft.Foo/stable/2023-01-01/foo.json", + "specification/bar/data-plane/stable/2023-01-01/bar.json", + ], + modifications: [ + "specification/baz/resource-manager/Microsoft.Baz/stable/2023-01-01/baz.json", + ], + deletions: ["specification/qux/data-plane/stable/2023-01-01/qux.json"], + renames: [ + { + from: "specification/old/resource-manager/Microsoft.Old/stable/2023-01-01/old.json", + to: "specification/new/resource-manager/Microsoft.New/stable/2023-01-01/new.json", + }, + ], + total: 5, + }; + + mockGetChangedFilesStatuses.mockResolvedValue(mockResult); + + const result = await getSwaggerDiffs({ + baseCommitish: "main", + cwd: "/test/path", + headCommitish: "HEAD", + }); + + expect(result).toEqual(mockResult); + expect(mockGetChangedFilesStatuses).toHaveBeenCalledWith({ + baseCommitish: "main", + cwd: "/test/path", + headCommitish: "HEAD", + }); + }); + + it("should return empty result on error", async () => { + mockGetChangedFilesStatuses.mockRejectedValue(new Error("Git error")); + + const result = await getSwaggerDiffs(); + + expect(result).toEqual({ + additions: [], + modifications: [], + deletions: [], + renames: [], + total: 0, + }); + expect(console.error).toHaveBeenCalledWith( + "Error getting categorized changed files:", + expect.any(Error), + ); + }); + + it("should filter out non-Swagger files", async () => { + const mockResult = { + additions: [ + 
"specification/foo/resource-manager/Microsoft.Foo/stable/2023-01-01/foo.json", // Valid Swagger + ".github/workflows/test.yaml", // Non-Swagger (YAML) + "specification/bar/resource-manager/Microsoft.Bar/stable/2023-01-01/examples/example.json", // Example file + "README.md", // Non-JSON + "specification/baz/data-plane/stable/2023-01-01/baz.json", // Valid Swagger + ], + modifications: [ + "specification/qux/resource-manager/Microsoft.Qux/stable/2023-01-01/qux.json", // Valid Swagger + "package.json", // Non-Swagger JSON + ], + deletions: [ + "specification/old/data-plane/stable/2023-01-01/old.json", // Valid Swagger + "dist/build.js", // Non-Swagger + ], + renames: [ + { + from: "specification/old/resource-manager/Microsoft.Old/stable/2023-01-01/old.json", // Valid Swagger + to: "specification/new/resource-manager/Microsoft.New/stable/2023-01-01/new.json", // Valid Swagger + }, + { + from: "old-readme.md", // Non-Swagger + to: "new-readme.md", // Non-Swagger + }, + ], + total: 9, + }; + + mockGetChangedFilesStatuses.mockResolvedValue(mockResult); + + const result = await getSwaggerDiffs(); + + // Only Swagger files should be returned + expect(result).toEqual({ + additions: [ + "specification/foo/resource-manager/Microsoft.Foo/stable/2023-01-01/foo.json", + "specification/baz/data-plane/stable/2023-01-01/baz.json", + ], + modifications: [ + "specification/qux/resource-manager/Microsoft.Qux/stable/2023-01-01/qux.json", + ], + deletions: ["specification/old/data-plane/stable/2023-01-01/old.json"], + renames: [ + { + from: "specification/old/resource-manager/Microsoft.Old/stable/2023-01-01/old.json", + to: "specification/new/resource-manager/Microsoft.New/stable/2023-01-01/new.json", + }, + ], + total: 5, + }); + }); + + it("should use default options when none provided", async () => { + const mockResult = { + additions: [], + modifications: [], + deletions: [], + renames: [], + total: 0, + }; + + mockGetChangedFilesStatuses.mockResolvedValue(mockResult); + + await 
getSwaggerDiffs(); + + expect(mockGetChangedFilesStatuses).toHaveBeenCalledWith({ + baseCommitish: undefined, + cwd: undefined, + headCommitish: undefined, + }); + }); + }); + + describe("buildPrInfo", () => { + it("should build PR info successfully", async () => { + const { createPullRequestProperties } = await import("../src/utils/pull-request.js"); + + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "main", + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + }; + + const mockPrInfo = { + baseBranch: "main", + targetBranch: "main", + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "main", + checkout: vi.fn(), + }; + + vi.mocked(createPullRequestProperties).mockResolvedValue(mockPrInfo); + + await buildPrInfo(mockContext); + + expect(mockContext.prInfo).toBe(mockPrInfo); + expect(createPullRequestProperties).toHaveBeenCalledWith(mockContext, "same-version"); + }); + + it("should use cross-version prefix for CrossVersion run type", async () => { + const { createPullRequestProperties } = await import("../src/utils/pull-request.js"); + + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "CrossVersion", + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "main", + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + 
messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + }; + + const mockPrInfo = { + baseBranch: "main", + targetBranch: "main", + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "main", + checkout: vi.fn(), + }; + + vi.mocked(createPullRequestProperties).mockResolvedValue(mockPrInfo); + + await buildPrInfo(mockContext); + + expect(createPullRequestProperties).toHaveBeenCalledWith(mockContext, "cross-version"); + }); + + it("should throw error when PR info creation fails", async () => { + const { createPullRequestProperties } = await import("../src/utils/pull-request.js"); + + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "main", + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + }; + + vi.mocked(createPullRequestProperties).mockResolvedValue(undefined); + + await expect(buildPrInfo(mockContext)).rejects.toThrow("create PR failed!"); + }); + + it("should throw error when PR info has no target branch", async () => { + const { createPullRequestProperties } = await import("../src/utils/pull-request.js"); + + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "main", + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: 
"https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + }; + + const mockPrInfo = { + baseBranch: "main", + targetBranch: "", // Empty target branch + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "main", + checkout: vi.fn(), + }; + + vi.mocked(createPullRequestProperties).mockResolvedValue(mockPrInfo); + + await expect(buildPrInfo(mockContext)).rejects.toThrow("create PR failed!"); + }); + }); + + describe("changeBaseBranch", () => { + it("should change base branch when different from target and not whitelisted", async () => { + const { logMessage } = await import("../src/log.js"); + + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "develop", // Different from baseBranch + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + prInfo: { + baseBranch: "develop", + targetBranch: "develop", + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "develop", + checkout: vi.fn(), + }, + }; + + changeBaseBranch(mockContext); + + expect(mockContext.prInfo!.baseBranch).toBe("main"); + expect(logMessage).toHaveBeenCalledWith("switch target branch to main"); + }); + + it("should not change base branch when same as target", () => { + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "test", + repo: "test/repo", + 
prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "main", // Same as baseBranch + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + prInfo: { + baseBranch: "main", + targetBranch: "main", + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "main", + checkout: vi.fn(), + }, + }; + + const originalBaseBranch = mockContext.prInfo!.baseBranch; + changeBaseBranch(mockContext); + + expect(mockContext.prInfo!.baseBranch).toBe(originalBaseBranch); + }); + + it("should not change base branch for whitelisted branches", () => { + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "ARMCoreRPDev", // Whitelisted branch + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + prInfo: { + baseBranch: "ARMCoreRPDev", + targetBranch: "ARMCoreRPDev", + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "ARMCoreRPDev", + checkout: vi.fn(), + }, + }; + + const originalBaseBranch = mockContext.prInfo!.baseBranch; + changeBaseBranch(mockContext); + + expect(mockContext.prInfo!.baseBranch).toBe(originalBaseBranch); + }); + + it("should change base branch for CrossVersion run type when different from target", async () => { + const { logMessage } = await import("../src/log.js"); + + const mockContext: Context = { + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + 
baseBranch: "main", + headCommit: "HEAD", + runType: "CrossVersion", // CrossVersion type + checkName: "test", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "develop", // Different from baseBranch + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + prInfo: { + baseBranch: "develop", + targetBranch: "develop", + sourceBranch: "feature", + workingDir: "/working/dir", + currentBranch: "develop", + checkout: vi.fn(), + }, + }; + + changeBaseBranch(mockContext); + + // CrossVersion also changes base branch when different from target + expect(mockContext.prInfo!.baseBranch).toBe("main"); + expect(logMessage).toHaveBeenCalledWith("switch target branch to main"); + }); + }); + + describe("logFullOadMessagesList", () => { + it("should log all messages individually", async () => { + const { logMessage } = await import("../src/log.js"); + + const msgs: ResultMessageRecord[] = [ + { + type: "Result", + level: "Error", + message: "Test error message", + time: new Date("2023-01-01"), + paths: [], + }, + { + type: "Result", + level: "Warning", + message: "Test warning message", + time: new Date("2023-01-02"), + paths: [], + }, + ]; + + logFullOadMessagesList(msgs); + + expect(logMessage).toHaveBeenCalledWith("---- Full list of messages ----"); + expect(logMessage).toHaveBeenCalledWith("["); + expect(logMessage).toHaveBeenCalledWith(JSON.stringify(msgs[0], null, 4) + ","); + expect(logMessage).toHaveBeenCalledWith(JSON.stringify(msgs[1], null, 4) + ","); + expect(logMessage).toHaveBeenCalledWith("]"); + expect(logMessage).toHaveBeenCalledWith("---- End of full list of messages ----"); + }); + + it("should handle empty message list", async () => { + const { logMessage } = await import("../src/log.js"); + + logFullOadMessagesList([]); + + 
expect(logMessage).toHaveBeenCalledWith("---- Full list of messages ----"); + expect(logMessage).toHaveBeenCalledWith("["); + expect(logMessage).toHaveBeenCalledWith("]"); + expect(logMessage).toHaveBeenCalledWith("---- End of full list of messages ----"); + }); + }); + + describe("createDummySwagger", () => { + it("should create dummy swagger file successfully", async () => { + const { logMessage } = await import("../src/log.js"); + + const fromSwagger = "/path/to/source.json"; + const toSwagger = "/path/to/target.json"; + const mockSwaggerContent = JSON.stringify({ + swagger: "2.0", + info: { title: "Test API", version: "1.0" }, + paths: { "/test": { get: {} } }, + "x-ms-paths": { "/test2": { post: {} } }, + "x-ms-parameterized-host": { hostTemplate: "test.com" }, + parameters: { testParam: {} }, + definitions: { TestModel: {} }, + }); + + mockExistsSync.mockReturnValue(false); + mockReadFileSync.mockReturnValue(Buffer.from(mockSwaggerContent)); + + createDummySwagger(fromSwagger, toSwagger); + + expect(mockMkdirSync).toHaveBeenCalledWith("/path/to", { recursive: true }); + expect(mockReadFileSync).toHaveBeenCalledWith(fromSwagger); + expect(mockWriteFileSync).toHaveBeenCalledWith( + toSwagger, + expect.stringContaining('"paths": {}'), + ); + expect(logMessage).toHaveBeenCalledWith( + `created a dummy swagger: ${toSwagger} from ${fromSwagger}`, + ); + + // Verify the dummy swagger content + const writeCall = mockWriteFileSync.mock.calls[0]; + const writtenContent = JSON.parse(writeCall[1] as string); + expect(writtenContent.paths).toEqual({}); + expect(writtenContent["x-ms-paths"]).toEqual({}); + expect(writtenContent["x-ms-parameterized-host"]).toBeUndefined(); + expect(writtenContent.parameters).toEqual({}); + expect(writtenContent.definitions).toEqual({}); + }); + + it("should create directory if it doesn't exist", () => { + const fromSwagger = "/path/to/source.json"; + const toSwagger = "/path/to/nested/target.json"; + const mockSwaggerContent = 
JSON.stringify({ + swagger: "2.0", + info: { title: "Test API", version: "1.0" }, + }); + + mockExistsSync.mockReturnValue(false); + mockReadFileSync.mockReturnValue(Buffer.from(mockSwaggerContent)); + + createDummySwagger(fromSwagger, toSwagger); + + expect(mockMkdirSync).toHaveBeenCalledWith("/path/to/nested", { recursive: true }); + }); + + it("should not create directory if it already exists", () => { + const fromSwagger = "/path/to/source.json"; + const toSwagger = "/path/to/target.json"; + const mockSwaggerContent = JSON.stringify({ + swagger: "2.0", + info: { title: "Test API", version: "1.0" }, + }); + + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue(Buffer.from(mockSwaggerContent)); + + createDummySwagger(fromSwagger, toSwagger); + + expect(mockMkdirSync).not.toHaveBeenCalled(); + }); + + it("should handle swagger without optional fields", () => { + const fromSwagger = "/path/to/source.json"; + const toSwagger = "/path/to/target.json"; + const mockSwaggerContent = JSON.stringify({ + swagger: "2.0", + info: { title: "Test API", version: "1.0" }, + paths: { "/test": { get: {} } }, + }); + + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue(Buffer.from(mockSwaggerContent)); + + createDummySwagger(fromSwagger, toSwagger); + + expect(mockWriteFileSync).toHaveBeenCalled(); + const writeCall = mockWriteFileSync.mock.calls[0]; + const writtenContent = JSON.parse(writeCall[1] as string); + expect(writtenContent.paths).toEqual({}); + expect(writtenContent["x-ms-paths"]).toBeUndefined(); + expect(writtenContent["x-ms-parameterized-host"]).toBeUndefined(); + }); + }); + + describe("cleanDummySwagger", () => { + it("should remove all created dummy swagger files", () => { + // Create some dummy files first + const file1 = "/path/to/dummy1.json"; + const file2 = "/path/to/dummy2.json"; + + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue(Buffer.from('{"swagger": "2.0"}')); + + 
createDummySwagger("/source1.json", file1); + createDummySwagger("/source2.json", file2); + + // Clear previous mock calls before testing cleanup + mockRmSync.mockClear(); + + // Now clean them up + cleanDummySwagger(); + + expect(mockRmSync).toHaveBeenCalledWith(file1, { recursive: true, force: true }); + expect(mockRmSync).toHaveBeenCalledWith(file2, { recursive: true, force: true }); + expect(mockRmSync).toHaveBeenCalledTimes(2); + }); + + it("should handle empty list of created files", () => { + // Clear any previous calls + mockRmSync.mockClear(); + + cleanDummySwagger(); + expect(mockRmSync).not.toHaveBeenCalled(); + }); + }); + + describe("isSameVersionBreakingType", () => { + it("should return true for SameVersion type", () => { + expect(isSameVersionBreakingType("SameVersion")).toBe(true); + }); + + it("should return false for CrossVersion type", () => { + expect(isSameVersionBreakingType("CrossVersion")).toBe(false); + }); + }); + + describe("getCreatedDummySwaggerCount", () => { + it("should return current count of created dummy files", () => { + // Get initial count (may not be 0 due to other tests) + const initialCount = getCreatedDummySwaggerCount(); + + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue(Buffer.from('{"swagger": "2.0"}')); + + createDummySwagger("/source1.json", "/dummy1.json"); + expect(getCreatedDummySwaggerCount()).toBe(initialCount + 1); + + createDummySwagger("/source2.json", "/dummy2.json"); + expect(getCreatedDummySwaggerCount()).toBe(initialCount + 2); + }); + + it("should return correct count after creating multiple dummy files", () => { + const initialCount = getCreatedDummySwaggerCount(); + + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue(Buffer.from('{"swagger": "2.0"}')); + + createDummySwagger("/source1.json", "/dummy1.json"); + createDummySwagger("/source2.json", "/dummy2.json"); + expect(getCreatedDummySwaggerCount()).toBe(initialCount + 2); + + cleanDummySwagger(); + 
expect(getCreatedDummySwaggerCount()).toBe(0); + }); + + it("should return 0 after cleaning all dummy files", () => { + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue(Buffer.from('{"swagger": "2.0"}')); + + // Create some files + createDummySwagger("/source1.json", "/dummy1.json"); + createDummySwagger("/source2.json", "/dummy2.json"); + expect(getCreatedDummySwaggerCount()).toBeGreaterThan(0); + + // Clean them + cleanDummySwagger(); + expect(getCreatedDummySwaggerCount()).toBe(0); + }); + }); + + describe("outputBreakingChangeLabelVariables", () => { + beforeEach(() => { + // Clear the labels set before each test + BreakingChangeLabelsToBeAdded.clear(); + }); + + it("should set both labels to false when no labels need to be added", async () => { + const { setOutput } = await import("../src/log.js"); + + outputBreakingChangeLabelVariables(); + + expect(setOutput).toHaveBeenCalledWith( + "breakingChangeReviewLabelName", + BreakingChangeReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("breakingChangeReviewLabelValue", "false"); + expect(setOutput).toHaveBeenCalledWith( + "versioningReviewLabelName", + VersioningReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("versioningReviewLabelValue", "false"); + }); + + it("should set BreakingChangeReviewRequired to true when present in labels set", async () => { + const { setOutput } = await import("../src/log.js"); + + BreakingChangeLabelsToBeAdded.add(BreakingChangeReviewRequiredLabel); + + outputBreakingChangeLabelVariables(); + expect(setOutput).toHaveBeenCalledWith( + "breakingChangeReviewLabelName", + BreakingChangeReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("breakingChangeReviewLabelValue", "true"); + expect(setOutput).toHaveBeenCalledWith( + "versioningReviewLabelName", + VersioningReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("versioningReviewLabelValue", "false"); + }); + + it("should set VersioningReviewRequired 
to true when present in labels set", async () => { + const { setOutput } = await import("../src/log.js"); + + BreakingChangeLabelsToBeAdded.add(VersioningReviewRequiredLabel); + + outputBreakingChangeLabelVariables(); + expect(setOutput).toHaveBeenCalledWith( + "breakingChangeReviewLabelName", + BreakingChangeReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("breakingChangeReviewLabelValue", "false"); + expect(setOutput).toHaveBeenCalledWith( + "versioningReviewLabelName", + VersioningReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("versioningReviewLabelValue", "true"); + }); + + it("should set both labels to true when both are present in labels set", async () => { + const { setOutput } = await import("../src/log.js"); + + BreakingChangeLabelsToBeAdded.add(BreakingChangeReviewRequiredLabel); + BreakingChangeLabelsToBeAdded.add(VersioningReviewRequiredLabel); + + outputBreakingChangeLabelVariables(); + expect(setOutput).toHaveBeenCalledWith( + "breakingChangeReviewLabelName", + BreakingChangeReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("breakingChangeReviewLabelValue", "true"); + expect(setOutput).toHaveBeenCalledWith( + "versioningReviewLabelName", + VersioningReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("versioningReviewLabelValue", "true"); + }); + + it("should handle labels set with non-review labels", async () => { + const { setOutput } = await import("../src/log.js"); + + BreakingChangeLabelsToBeAdded.add("SomeOtherLabel"); + + outputBreakingChangeLabelVariables(); + expect(setOutput).toHaveBeenCalledWith( + "breakingChangeReviewLabelName", + BreakingChangeReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("breakingChangeReviewLabelValue", "false"); + expect(setOutput).toHaveBeenCalledWith( + "versioningReviewLabelName", + VersioningReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("versioningReviewLabelValue", "false"); + }); + + it("should handle 
mixed labels including one review label", async () => { + const { setOutput } = await import("../src/log.js"); + + BreakingChangeLabelsToBeAdded.add("SomeOtherLabel"); + BreakingChangeLabelsToBeAdded.add(BreakingChangeReviewRequiredLabel); + + outputBreakingChangeLabelVariables(); + expect(setOutput).toHaveBeenCalledWith( + "breakingChangeReviewLabelName", + BreakingChangeReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("breakingChangeReviewLabelValue", "true"); + expect(setOutput).toHaveBeenCalledWith( + "versioningReviewLabelName", + VersioningReviewRequiredLabel, + ); + expect(setOutput).toHaveBeenCalledWith("versioningReviewLabelValue", "false"); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/generate-report.test.ts b/eng/tools/openapi-diff-runner/test/generate-report.test.ts new file mode 100644 index 000000000000..94d1c3d1eef9 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/generate-report.test.ts @@ -0,0 +1,399 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { generateBreakingChangeResultSummary } from "../src/generate-report.js"; +import { Context } from "../src/types/breaking-change.js"; +import { RawMessageRecord, ResultMessageRecord } from "../src/types/message.js"; +import { addToSummary, logMessage, logWarning } from "../src/log.js"; +import { + BreakingChangeMdReport, + createBreakingChangeMdReport, + reportToString, + sortBreakingChangeMdReports, +} from "../src/utils/markdown-report.js"; + +// Mock dependencies +vi.mock("../src/log.js"); +vi.mock("../src/utils/markdown-report.js"); + +describe("generate-report", () => { + const mockAddToSummary = vi.mocked(addToSummary); + const mockLogMessage = vi.mocked(logMessage); + const mockLogWarning = vi.mocked(logWarning); + const mockCreateBreakingChangeMdReport = vi.mocked(createBreakingChangeMdReport); + const mockReportToString = vi.mocked(reportToString); + const mockSortBreakingChangeMdReports = 
vi.mocked(sortBreakingChangeMdReports); + + beforeEach(() => { + vi.clearAllMocks(); + + // Mock environment variable for GitHub Actions + vi.stubEnv("GITHUB_STEP_SUMMARY", "/path/to/summary"); + + // Setup default mock implementations + mockCreateBreakingChangeMdReport.mockReturnValue({ + msgs: [], + rows: [], + type: "Result", + level: "Error", + rawMessage: "Test report", + } as BreakingChangeMdReport); + + mockReportToString.mockReturnValue("Mock report string"); + mockSortBreakingChangeMdReports.mockImplementation((reports) => reports); + }); + + afterEach(() => { + vi.resetAllMocks(); + vi.unstubAllEnvs(); + }); + + const createMockContext = (overrides: Partial = {}): Context => ({ + localSpecRepoPath: "/path/to/repo", + workingFolder: "/working", + logFileFolder: "/logs", + swaggerDirs: ["specification"], + baseBranch: "main", + headCommit: "HEAD", + runType: "SameVersion", + checkName: "Swagger BreakingChange", + repo: "test/repo", + prNumber: "123", + prSourceBranch: "feature", + prTargetBranch: "main", + oadMessageProcessorContext: { + logFilePath: "/log/path", + prUrl: "https://github.com/test/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/test/repo/pull/123", + ...overrides, + }); + + const createMockResultMessage = ( + overrides: Partial = {}, + ): ResultMessageRecord => ({ + type: "Result", + level: "Error", + message: "Test error message", + time: new Date("2023-01-01"), + paths: [], + groupName: "stable", + ...overrides, + }); + + const createMockRawMessage = (overrides: Partial = {}): RawMessageRecord => ({ + type: "Raw", + level: "Error", + message: "Test raw error", + time: new Date("2023-01-01"), + groupName: "stable", + extra: {}, + ...overrides, + }); + + describe("generateBreakingChangeResultSummary", () => { + it("should generate summary with success status", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = []; + const runtimeErrors: 
RawMessageRecord[] = []; + const comparedSpecsTableContent = + "| Spec | Status |\n|------|--------|\n| test.json | Modified |"; + const summaryDataSuppressionAndDetailsText = "\n\nAdditional details..."; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockLogMessage).toHaveBeenCalledWith(expect.stringContaining("Successfully wrote")); + }); + + it("should generate summary with failure status", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = [ + createMockResultMessage({ level: "Error" }), + createMockResultMessage({ level: "Warning" }), + ]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith( + expect.stringContaining("Detected: 1 Errors, 1 Warnings"), + ); + }); + + it("should handle messages with only warnings", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = [ + createMockResultMessage({ level: "Warning" }), + createMockResultMessage({ level: "Warning" }), + ]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith( + expect.stringContaining("Detected: 2 Warnings"), + ); + }); + + it("should include compared specs table content", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = []; + const runtimeErrors: 
RawMessageRecord[] = []; + const comparedSpecsTableContent = + "| Spec | Status |\n|------|--------|\n| test.json | Modified |"; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith(expect.stringContaining("| Spec | Status |")); + }); + + it("should handle runtime errors", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = []; + const runtimeErrors: RawMessageRecord[] = [createMockRawMessage({ level: "Error" })]; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockLogMessage).toHaveBeenCalledWith( + expect.stringContaining("messageRecords# raw/result/all: 1/0/1"), + ); + }); + + it("should handle different check names", async () => { + const context = createMockContext({ + checkName: "BreakingChange(Cross-Version)", + }); + const messages: ResultMessageRecord[] = [ + createMockResultMessage({ groupName: "stable" }), + createMockResultMessage({ groupName: "preview" }), + ]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith(expect.stringContaining("Detected")); + }); + + it("should handle mixed stable and preview messages", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = [ + createMockResultMessage({ level: "Error", groupName: "stable" }), + createMockResultMessage({ 
level: "Warning", groupName: "preview" }), + createMockResultMessage({ level: "Error", groupName: "preview" }), + ]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + mockCreateBreakingChangeMdReport.mockReturnValue({ + msgs: [], + rows: [], + type: "Result", + level: "Error", + rawMessage: "Test report with messages", + } as BreakingChangeMdReport); + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockCreateBreakingChangeMdReport).toHaveBeenCalled(); + expect(mockSortBreakingChangeMdReports).toHaveBeenCalled(); + }); + + it("should handle empty message lists", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = []; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith( + expect.stringContaining("No breaking changes detected"), + ); + }); + + it("should handle cross-version check with different API versions", async () => { + const context = createMockContext({ + checkName: "BreakingChange(Cross-Version)", + }); + const messages: ResultMessageRecord[] = [ + createMockResultMessage({ groupName: "stable" }), + createMockResultMessage({ groupName: "preview" }), + ]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith( + 
expect.stringContaining( + "The following breaking changes have been detected in comparison to the latest stable version", + ), + ); + expect(mockAddToSummary).toHaveBeenCalledWith( + expect.stringContaining( + "The following breaking changes have been detected in comparison to the latest preview version", + ), + ); + }); + + it.skip("should handle comment data length exceeding limit", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = [createMockResultMessage()]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = "x".repeat(60000); // Very long table content + const summaryDataSuppressionAndDetailsText = ""; + + // Mock a very long report string + mockReportToString.mockReturnValue("x".repeat(10000)); + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockLogWarning).toHaveBeenCalledWith( + expect.stringContaining("ASSERTION VIOLATION! 
commentData.length"), + ); + expect(mockAddToSummary).toHaveBeenCalledWith(expect.stringContaining("⚠️ TRUNCATED ⚠️")); + }); + + it.skip("should iteratively reduce max row count to fit within limits", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = [ + createMockResultMessage(), + createMockResultMessage(), + createMockResultMessage(), + ]; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = ""; + + // First call returns long string, subsequent calls return shorter strings + mockReportToString + .mockReturnValueOnce("x".repeat(70000)) // Too long + .mockReturnValueOnce("x".repeat(60000)) // Still too long + .mockReturnValueOnce("x".repeat(50000)); // Finally fits + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockLogMessage).toHaveBeenCalledWith( + expect.stringContaining("maxRowCount reduced/current/max"), + ); + }); + + it("should include suppression and details text in summary", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = []; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; + const summaryDataSuppressionAndDetailsText = + "\n\n**Suppression Info:**\nSome details about suppressions."; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith( + expect.stringContaining("**Suppression Info:**\nSome details about suppressions."), + ); + }); + + it("should include important notice in summary", async () => { + const context = createMockContext(); + const messages: ResultMessageRecord[] = []; + const runtimeErrors: RawMessageRecord[] = []; + const comparedSpecsTableContent = ""; 
+ const summaryDataSuppressionAndDetailsText = ""; + + await generateBreakingChangeResultSummary( + context, + messages, + runtimeErrors, + comparedSpecsTableContent, + summaryDataSuppressionAndDetailsText, + ); + + expect(mockAddToSummary).toHaveBeenCalledWith( + expect.stringContaining("> [!IMPORTANT]\n> Browse to the job logs to see the details."), + ); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/types/oad-types.test.ts b/eng/tools/openapi-diff-runner/test/types/oad-types.test.ts new file mode 100644 index 000000000000..33b76c16eda4 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/types/oad-types.test.ts @@ -0,0 +1,90 @@ +import { describe, expect, it } from "vitest"; +import { + createOadTrace, + addOadTrace, + generateOadMarkdown, + setOadBaseBranch, +} from "../../src/types/oad-types.js"; +import { Context } from "../../src/types/breaking-change.js"; + +const mockContext: Context = { + runType: "SameVersion", + prUrl: "https://github.com/Azure/azure-rest-api-specs/pull/12345", + prTargetBranch: "main", + prSourceBranch: "feature-branch", + headCommit: "abc123", + baseBranch: "main", + localSpecRepoPath: "/path/to/repo", + workingFolder: "/path/to/working", + logFileFolder: "/path/to/logs", + swaggerDirs: ["/path/to/swagger"], + checkName: "test-check", + repo: "Azure/azure-rest-api-specs", + prNumber: "12345", + oadMessageProcessorContext: { + logFilePath: "/path/to/log", + prUrl: "https://github.com/Azure/azure-rest-api-specs/pull/12345", + messageCache: [], + }, +}; + +describe("OAD Trace Functions", () => { + it("should create an empty trace data structure", () => { + const traceData = createOadTrace(mockContext); + expect(traceData.traces).toEqual([]); + expect(traceData.context).toBe(mockContext); + }); + + it("should add a trace entry", () => { + const traceData = createOadTrace(mockContext); + const updatedTrace = addOadTrace(traceData, "path/to/old.json", "path/to/new.json"); + + 
expect(updatedTrace.traces).toHaveLength(1); + expect(updatedTrace.traces[0]).toEqual({ + old: "path/to/old.json", + new: "path/to/new.json", + baseBranch: "main", + }); + }); + + it("should set base branch", () => { + const traceData = createOadTrace(mockContext); + const updatedTrace = setOadBaseBranch(traceData, "feature-branch"); + + expect(updatedTrace.baseBranch).toBe("feature-branch"); + }); + + it("should generate empty markdown when no traces", () => { + const traceData = createOadTrace(mockContext); + const markdown = generateOadMarkdown(traceData); + + expect(markdown).toBe(""); + }); + + it("should generate markdown table when traces exist", () => { + let traceData = createOadTrace(mockContext); + traceData = addOadTrace( + traceData, + "specification/storage/resource-manager/Microsoft.Storage/stable/2021-09-01/storage.json", + "specification/storage/resource-manager/Microsoft.Storage/stable/2021-09-01/storage.json", + ); + + const markdown = generateOadMarkdown(traceData); + + expect(markdown).toContain("| Compared specs"); + expect(markdown).toContain("storage.json"); + expect(markdown).toContain("2021-09-01"); + expect(markdown).toContain("abc123"); + expect(markdown).toContain("main"); + }); + + it("should accumulate multiple traces", () => { + let traceData = createOadTrace(mockContext); + traceData = addOadTrace(traceData, "path/to/old1.json", "path/to/new1.json"); + traceData = addOadTrace(traceData, "path/to/old2.json", "path/to/new2.json"); + + expect(traceData.traces).toHaveLength(2); + expect(traceData.traces[0].old).toBe("path/to/old1.json"); + expect(traceData.traces[1].old).toBe("path/to/old2.json"); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/utils/apply-rules.test.ts b/eng/tools/openapi-diff-runner/test/utils/apply-rules.test.ts new file mode 100644 index 000000000000..89a83f7d7ed3 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/utils/apply-rules.test.ts @@ -0,0 +1,222 @@ +import { describe, it, expect, vi, 
beforeEach } from "vitest"; +import { applyRules } from "../../src/utils/apply-rules.js"; +import { OadMessage } from "../../src/types/oad-types.js"; +import { BreakingChangeLabelsToBeAdded } from "../../src/command-helpers.js"; +import { logMessage, logWarning } from "../../src/log.js"; + +// Mock the command-helpers module +vi.mock("../../src/command-helpers.js", () => ({ + BreakingChangeLabelsToBeAdded: { + add: vi.fn(), + clear: vi.fn(), + values: [], + }, +})); + +// Mock the log module +vi.mock("../../src/log.js", () => ({ + logMessage: vi.fn(), + logWarning: vi.fn(), + LogLevel: { + Info: "Info", + Warning: "Warning", + Error: "Error", + }, +})); + +// Mock the oad-rule-map module +vi.mock("../../src/utils/oad-rule-map.js", () => ({ + oadMessagesRuleMap: [ + { + scenario: "SameVersion", + code: "AddedRequiredProperty", + severity: "Error", + label: "BreakingChangeReviewRequired", + }, + { + scenario: "CrossVersion", + code: "AddedRequiredProperty", + severity: "Error", + label: "BreakingChangeReviewRequired", + }, + { + scenario: "SameVersion", + code: "RemovedProperty", + severity: "Warning", + label: null, + }, + ], + fallbackRule: { + severity: "Warning", + label: null, + }, + fallbackLabel: "BreakingChangeReviewRequired", +})); + +const createTestOadMessage = ( + code: string = "AddedRequiredProperty", + id: string = "1001", +): OadMessage => ({ + type: "Info", + code: code as any, + id, + message: `Test message for ${code}`, + docUrl: `https://docs.example.com/rules/${code}`, + mode: "Addition", + new: { location: "specification/test.json#L10", path: "specification/test.json" }, + old: { location: "specification/test.json#L8", path: "specification/test.json" }, +}); + +describe("apply-rules", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe("applyRules", () => { + it("should apply matching rule for same version scenario", () => { + const oadMessages: OadMessage[] = [createTestOadMessage()]; + + const result = applyRules(oadMessages, 
"SameVersion", "stable"); + + expect(result).toHaveLength(1); + expect(result[0].type).toBe("Error"); + expect(result[0].groupName).toBe("stable"); + expect(BreakingChangeLabelsToBeAdded.add).toHaveBeenCalledWith( + "BreakingChangeReviewRequired", + ); + }); + + it("should apply matching rule for cross version scenario", () => { + const oadMessages: OadMessage[] = [createTestOadMessage()]; + + const result = applyRules(oadMessages, "CrossVersion", "stable"); + + expect(result).toHaveLength(1); + expect(result[0].type).toBe("Error"); + expect(result[0].groupName).toBe("stable"); + expect(BreakingChangeLabelsToBeAdded.add).toHaveBeenCalledWith( + "BreakingChangeReviewRequired", + ); + }); + + it("should downgrade error to warning for cross version against previous preview", () => { + const oadMessages: OadMessage[] = [createTestOadMessage()]; + + const result = applyRules(oadMessages, "CrossVersion", "preview"); + + expect(result).toHaveLength(1); + expect(result[0].type).toBe("Warning"); + expect(result[0].groupName).toBe("preview"); + expect(BreakingChangeLabelsToBeAdded.add).not.toHaveBeenCalled(); + }); + + it("should use VersioningReviewRequired label for same version preview", () => { + const oadMessages: OadMessage[] = [createTestOadMessage()]; + + const result = applyRules(oadMessages, "SameVersion", "preview"); + + expect(result).toHaveLength(1); + expect(result[0].type).toBe("Error"); + expect(result[0].groupName).toBe("preview"); + expect(BreakingChangeLabelsToBeAdded.add).toHaveBeenCalledWith("VersioningReviewRequired"); + }); + + it("should not add label for warning severity", () => { + const oadMessages: OadMessage[] = [createTestOadMessage("RemovedProperty", "1002")]; + + const result = applyRules(oadMessages, "SameVersion", "stable"); + + expect(result).toHaveLength(1); + expect(result[0].type).toBe("Warning"); + expect(result[0].groupName).toBe("stable"); + expect(BreakingChangeLabelsToBeAdded.add).not.toHaveBeenCalled(); + }); + + it("should use 
fallback rule when no matching rule found", () => { + const oadMessages: OadMessage[] = [createTestOadMessage("TypeChanged", "1003")]; + + const result = applyRules(oadMessages, "SameVersion", "stable"); + + expect(result).toHaveLength(1); + expect(result[0].type).toBe("Warning"); + expect(result[0].groupName).toBe("stable"); + expect(logWarning).toHaveBeenCalledWith( + expect.stringContaining("No rule found for scenario"), + ); + }); + + it("should handle multiple messages", () => { + const oadMessages: OadMessage[] = [ + createTestOadMessage("AddedRequiredProperty", "1001"), + createTestOadMessage("RemovedProperty", "1002"), + ]; + + const result = applyRules(oadMessages, "SameVersion", "stable"); + + expect(result).toHaveLength(2); + expect(result[0].type).toBe("Error"); + expect(result[0].groupName).toBe("stable"); + expect(result[1].type).toBe("Warning"); + expect(result[1].groupName).toBe("stable"); + expect(BreakingChangeLabelsToBeAdded.add).toHaveBeenCalledTimes(1); + expect(BreakingChangeLabelsToBeAdded.add).toHaveBeenCalledWith( + "BreakingChangeReviewRequired", + ); + }); + + it("should preserve original message properties", () => { + const originalMessage: OadMessage = { + type: "Info", + code: "AddedRequiredProperty", + id: "1001", + message: "Added required property 'test'", + docUrl: "https://docs.example.com/rules/AddedRequiredProperty", + mode: "Addition", + new: { + location: "specification/test.json#L10", + path: "specification/test.json", + ref: "test-ref", + }, + old: { location: "specification/test.json#L8", path: "specification/test.json" }, + }; + + const result = applyRules([originalMessage], "SameVersion", "stable"); + + expect(result[0]).toMatchObject({ + code: "AddedRequiredProperty", + id: "1001", + message: "Added required property 'test'", + docUrl: "https://docs.example.com/rules/AddedRequiredProperty", + mode: "Addition", + new: { + location: "specification/test.json#L10", + path: "specification/test.json", + ref: "test-ref", + }, + 
old: { location: "specification/test.json#L8", path: "specification/test.json" }, + }); + }); + + it("should log entry and exit", () => { + const oadMessages: OadMessage[] = [createTestOadMessage()]; + + applyRules(oadMessages, "SameVersion", "stable"); + + expect(logMessage).toHaveBeenCalledWith("ENTER definition applyRules"); + expect(logMessage).toHaveBeenCalledWith("RETURN definition applyRules"); + }); + + it("should warn when rule has error severity but no label", () => { + // This test would require mocking the rule map differently, + // but the current implementation should handle this case + const oadMessages: OadMessage[] = [createTestOadMessage("TypeChanged", "1001")]; + + const result = applyRules(oadMessages, "SameVersion", "stable"); + + expect(result[0].type).toBe("Warning"); + expect(logWarning).toHaveBeenCalledWith( + expect.stringContaining("No rule found for scenario"), + ); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/utils/common-utils.test.ts b/eng/tools/openapi-diff-runner/test/utils/common-utils.test.ts new file mode 100644 index 000000000000..b4f7e5d7bae8 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/utils/common-utils.test.ts @@ -0,0 +1,392 @@ +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import { readFileSync, existsSync } from "node:fs"; +import { + blobHref, + targetHref, + branchHref, + getTargetBranch, + getGithubStyleFilePath, + getRelativeSwaggerPathToRepo, + sourceBranchHref, + targetBranchHref, + specificBranchHref, + getVersionFromInputFile, + getArgumentValue, + cutoffMsg, + processOadRuntimeErrorMessage, + specIsPreview, +} from "../../src/utils/common-utils.js"; + +// Mock node:fs +vi.mock("node:fs", () => ({ + existsSync: vi.fn(), + readFileSync: vi.fn(), +})); + +describe("common-utils", () => { + let originalEnv: NodeJS.ProcessEnv; + + beforeEach(() => { + originalEnv = { ...process.env }; + vi.clearAllMocks(); + }); + + afterEach(() => { + process.env = 
originalEnv; + }); + + describe("blobHref", () => { + it("should return GitHub URL when GITHUB_ACTIONS is set", () => { + process.env.GITHUB_ACTIONS = "true"; + process.env.GITHUB_HEAD_REPOSITORY = "owner/repo"; + process.env.GITHUB_SHA = "abc123"; + + const result = blobHref("test-file.json"); + expect(result).toBe("https://github.com/owner/repo/blob/abc123/test-file.json"); + }); + + it("should return file path for local development", () => { + delete process.env.GITHUB_ACTIONS; + const result = blobHref("test-file.json"); + expect(result).toBe("test-file.json"); + }); + }); + + describe("targetHref", () => { + beforeEach(() => { + process.env.GITHUB_REPOSITORY = "owner/repo"; + vi.spyOn(console, "log").mockImplementation(() => {}); + }); + + it("should return GitHub URL for valid file", () => { + process.env.GITHUB_BASE_REF = "main"; + const result = targetHref("specification/test.json"); + expect(result).toBe("https://github.com/owner/repo/blob/main/specification/test.json"); + }); + + it("should return empty string for empty file", () => { + const result = targetHref(""); + expect(result).toBe(""); + }); + }); + + describe("branchHref", () => { + beforeEach(() => { + process.env.GITHUB_REPOSITORY = "owner/repo"; + }); + + it("should return GitHub URL with specified branch", () => { + const result = branchHref("test-file.json", "feature-branch"); + expect(result).toBe("https://github.com/owner/repo/blob/feature-branch/test-file.json"); + }); + + it("should use main as default branch", () => { + const result = branchHref("test-file.json"); + expect(result).toBe("https://github.com/owner/repo/blob/main/test-file.json"); + }); + + it("should return empty string for empty file", () => { + const result = branchHref(""); + expect(result).toBe(""); + }); + }); + + describe("getTargetBranch", () => { + beforeEach(() => { + vi.spyOn(console, "log").mockImplementation(() => {}); + }); + + it("should return 
GITHUB_BASE_REF when available", () => { + process.env.GITHUB_BASE_REF = "main"; + process.env.GITHUB_REF_NAME = "feature"; + + const result = getTargetBranch(); + expect(result).toBe("main"); + }); + + it("should return GITHUB_REF_NAME when GITHUB_BASE_REF is not available", () => { + delete process.env.GITHUB_BASE_REF; + process.env.GITHUB_REF_NAME = "feature"; + + const result = getTargetBranch(); + expect(result).toBe("feature"); + }); + + it("should return 'main' as default", () => { + delete process.env.GITHUB_BASE_REF; + delete process.env.GITHUB_REF_NAME; + + const result = getTargetBranch(); + expect(result).toBe("main"); + }); + + it("should trim whitespace", () => { + process.env.GITHUB_BASE_REF = " main "; + + const result = getTargetBranch(); + expect(result).toBe("main"); + }); + }); + + describe("getGithubStyleFilePath", () => { + it("should format file path with line and column", () => { + const result = getGithubStyleFilePath("test.json", { line: 42, column: 5 }); + expect(result).toBe("test.json#L42:5"); + }); + + it("should format file path with colon replacement", () => { + const result = getGithubStyleFilePath("test.json:42:5"); + expect(result).toBe("test.json#L42:5"); + }); + + it("should handle file path without FilePosition", () => { + const result = getGithubStyleFilePath("test.json"); + expect(result).toBe("test.json"); + }); + }); + + describe("getRelativeSwaggerPathToRepo", () => { + it("should extract path from specification directory", () => { + const filePath = + "/home/user/azure-rest-api-specs/specification/storage/resource-manager/test.json"; + const result = getRelativeSwaggerPathToRepo(filePath); + expect(result).toBe("specification/storage/resource-manager/test.json"); + }); + + it("should use BUILD_SOURCEDIRECTORY when pattern not found", () => { + process.env.BUILD_SOURCEDIRECTORY = "/home/user/azure-rest-api-specs/"; + const filePath = "/home/user/azure-rest-api-specs/other/test.json"; + const result = 
getRelativeSwaggerPathToRepo(filePath); + expect(result).toBe("other/test.json"); + }); + + it("should handle custom patterns", () => { + const filePath = "/home/user/azure-rest-api-specs/custom/api/test.json"; + const result = getRelativeSwaggerPathToRepo(filePath, ["custom"]); + expect(result).toBe("custom/api/test.json"); + }); + }); + + describe("getVersionFromInputFile", () => { + beforeEach(() => { + vi.mocked(existsSync).mockReturnValue(false); + }); + + it("should extract version from data-plane path", () => { + const filePath = + "specification/storage/data-plane/Microsoft.Storage/stable/2021-01-01/storage.json"; + const result = getVersionFromInputFile(filePath); + expect(result).toBe("2021-01-01"); + }); + + it("should extract version with preview from data-plane path", () => { + const filePath = + "specification/storage/data-plane/Microsoft.Storage/preview/2021-01-01-preview/storage.json"; + const result = getVersionFromInputFile(filePath, true); + expect(result).toBe("2021-01-01-preview"); + }); + + it("should extract version from resource-manager path", () => { + const filePath = + "specification/storage/resource-manager/Microsoft.Storage/2021-01-01/storage.json"; + const result = getVersionFromInputFile(filePath); + expect(result).toBe("2021-01-01"); + }); + + it("should read version from file when exists", () => { + const filePath = "test.json"; + vi.mocked(existsSync).mockReturnValue(true); + vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ info: { version: "1.0.0" } })); + + const result = getVersionFromInputFile(filePath); + expect(result).toBe("1.0.0"); + }); + + it("should return undefined when no version found", () => { + const filePath = "invalid/path.json"; + const result = getVersionFromInputFile(filePath); + expect(result).toBeUndefined(); + }); + }); + + describe("getArgumentValue", () => { + it("should return argument value when flag exists", () => { + const args = ["--input", "test.json", "--output", "result.json"]; + const 
result = getArgumentValue(args, "--input", "default.json"); + expect(result).toBe("test.json"); + }); + + it("should return default value when flag not found", () => { + const args = ["--output", "result.json"]; + const result = getArgumentValue(args, "--input", "default.json"); + expect(result).toBe("default.json"); + }); + + it("should return default value when flag is last argument", () => { + const args = ["--output", "result.json", "--input"]; + const result = getArgumentValue(args, "--input", "default.json"); + expect(result).toBe("default.json"); + }); + }); + + describe("cutoffMsg", () => { + it("should return original message when within size limit", () => { + const msg = "short message"; + const result = cutoffMsg(msg, 1024); + expect(result).toBe("short message"); + }); + + it("should truncate message when exceeding size limit", () => { + const msg = "a".repeat(2000); + const result = cutoffMsg(msg, 1024); + expect(result).toBe("a".repeat(1024)); + }); + + it("should return empty string for undefined message", () => { + const result = cutoffMsg(undefined); + expect(result).toBe(""); + }); + + it("should use default size of 1024", () => { + const msg = "a".repeat(2000); + const result = cutoffMsg(msg); + expect(result).toBe("a".repeat(1024)); + }); + }); + + describe("processOadRuntimeErrorMessage", () => { + it("should process AutoRest runtime error", () => { + const message = + 'Command failed: node "/path/to/autorest/dist/app.js" --v2\\nERROR: Schema violation\\nFATAL: Error occurred'; + const result = processOadRuntimeErrorMessage(message, 500); + + expect(result).toContain("Breaking change detector (OAD) invoked AutoRest"); + expect(result).toContain("1: Command failed:"); + expect(result).toContain("ERROR: Schema violation"); + expect(result).toContain("
"); + }); + + it("should use cutoffMsg for non-AutoRest errors", () => { + const message = "Some other error message"; + const result = processOadRuntimeErrorMessage(message, 500); + expect(result).toBe("Some other error message"); + }); + + it("should handle empty lines in AutoRest error", () => { + const message = + 'Command failed: node "/path/to/autorest/dist/app.js"\n\nERROR: Test\n\nFATAL: Error'; + const result = processOadRuntimeErrorMessage(message, 500); + + expect(result).toContain("1: Command failed:"); + expect(result).toContain("ERROR: Test"); + expect(result).toContain("FATAL: Error"); + }); + + it("should respect stack trace max length", () => { + const lines = Array.from({ length: 10 }, (_, i) => `Line ${i + 1}`); + const message = `Command failed: node "/path/to/autorest/dist/app.js"\n${lines.join("\n")}`; + const result = processOadRuntimeErrorMessage(message, 3); + + expect(result).toContain("1: Command failed:"); + expect(result).toContain("2: Line 1"); + expect(result).toContain("3: Line 2"); + // Should not contain the 4th line since max length is 3 + expect(result).not.toContain("4:"); + }); + }); + + describe("specIsPreview", () => { + it("should return true for preview spec paths", () => { + const specPath = + "specification/maps/data-plane/Creator/preview/2022-09-01-preview/wayfind.json"; + const result = specIsPreview(specPath); + expect(result).toBe(true); + }); + + it("should return false for stable spec paths", () => { + const specPath = "specification/maps/data-plane/Creator/stable/2022-09-01/wayfind.json"; + const result = specIsPreview(specPath); + expect(result).toBe(false); + }); + + it("should return false when both preview and stable are in path", () => { + const specPath = + "specification/maps/data-plane/Creator/stable/2022-09-01/preview-example.json"; + const result = specIsPreview(specPath); + expect(result).toBe(false); + }); + + it("should return false for paths without preview", () => { + const specPath = 
"specification/maps/data-plane/Creator/2022-09-01/wayfind.json"; + const result = specIsPreview(specPath); + expect(result).toBe(false); + }); + }); + + describe("sourceBranchHref", () => { + beforeEach(() => { + process.env.GITHUB_ACTIONS = "true"; + process.env.GITHUB_HEAD_REPOSITORY = "owner/repo"; + process.env.GITHUB_SHA = "abc123"; + }); + + it("should return source branch href with file position", () => { + const result = sourceBranchHref("/home/user/specification/test.json", { + line: 10, + column: 5, + }); + expect(result).toContain( + "https://github.com/owner/repo/blob/abc123/specification/test.json#L10:5", + ); + }); + + it("should return source branch href without file position", () => { + const result = sourceBranchHref("/home/user/specification/test.json"); + expect(result).toContain("https://github.com/owner/repo/blob/abc123/specification/test.json"); + }); + }); + + describe("targetBranchHref", () => { + beforeEach(() => { + process.env.GITHUB_REPOSITORY = "owner/repo"; + process.env.GITHUB_BASE_REF = "main"; + vi.spyOn(console, "log").mockImplementation(() => {}); + }); + + it("should return target branch href with file position", () => { + const result = targetBranchHref("/home/user/specification/test.json", { + line: 20, + column: 3, + }); + expect(result).toBe("https://github.com/owner/repo/blob/main/specification/test.json#L20:3"); + }); + + it("should return target branch href without file position", () => { + const result = targetBranchHref("/home/user/specification/test.json"); + expect(result).toBe("https://github.com/owner/repo/blob/main/specification/test.json"); + }); + }); + + describe("specificBranchHref", () => { + beforeEach(() => { + process.env.GITHUB_REPOSITORY = "owner/repo"; + }); + + it("should return specific branch href with file position", () => { + const result = specificBranchHref("/home/user/specification/test.json", "feature-branch", { + line: 15, + column: 2, + 
}); + expect(result).toBe( + "https://github.com/owner/repo/blob/feature-branch/specification/test.json#L15:2", + ); + }); + + it("should return specific branch href without file position", () => { + const result = specificBranchHref("/home/user/specification/test.json", "feature-branch"); + expect(result).toBe( + "https://github.com/owner/repo/blob/feature-branch/specification/test.json", + ); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/utils/markdown-report-row.test.ts b/eng/tools/openapi-diff-runner/test/utils/markdown-report-row.test.ts new file mode 100644 index 000000000000..35f473d171b9 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/utils/markdown-report-row.test.ts @@ -0,0 +1,182 @@ +import { describe, it, expect } from "vitest"; +import { + createBreakingChangeMdRows, + getMdTableHeader, + getDeficitRow, + rowToString, + BreakingChangeMdRow, +} from "../../src/utils/markdown-report-row.js"; +import { BrChMsgRecord, ResultMessageRecord } from "../../src/types/message.js"; + +describe("markdown-report-row", () => { + describe("createBreakingChangeMdRows", () => { + it("should create rows from Result messages", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Result", + id: "test-id", + level: "Error", + message: "Test error message", + time: new Date("2023-01-01"), + paths: [ + { + tag: "New", + path: "https://github.com/owner/repo/blob/main/specification/test.json#L10:5", + jsonPath: "$.paths./test.get", + }, + { + tag: "Old", + path: "https://github.com/owner/repo/blob/old/specification/test.json#L8:3", + jsonPath: "$.paths./test.get.old", + }, + ], + } as ResultMessageRecord, + ]; + + const result = createBreakingChangeMdRows(msgs); + + expect(result).toHaveLength(1); + expect(result[0].index).toBe(1); + expect(result[0].msg).toBe(msgs[0]); + expect(result[0].description).toContain("Test error message"); + }); + + it("should create rows from Raw messages", () => { + 
const msgs: BrChMsgRecord[] = [ + { + type: "Raw", + level: "Error", + message: "Raw error message", + time: new Date("2023-01-01"), + groupName: "test-group", + extra: { + details: "Additional details", + code: "ERROR_CODE", + }, + }, + ]; + + const result = createBreakingChangeMdRows(msgs); + + expect(result).toHaveLength(1); + expect(result[0].index).toBe(1); + expect(result[0].description).toContain('"details":"Additional details"'); + expect(result[0].description).toContain('"code":"ERROR_CODE"'); + }); + + it("should sort rows by description", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Result", + id: "test-id-2", + level: "Error", + message: "Z message", + time: new Date("2023-01-01"), + paths: [], + } as ResultMessageRecord, + { + type: "Result", + id: "test-id-1", + level: "Error", + message: "A message", + time: new Date("2023-01-01"), + paths: [], + } as ResultMessageRecord, + ]; + + const result = createBreakingChangeMdRows(msgs); + + expect(result[0].msg.message).toBe("A message"); + expect(result[1].msg.message).toBe("Z message"); + }); + }); + + describe("getMdTableHeader", () => { + it("should return markdown table header", () => { + const result = getMdTableHeader(); + expect(result).toBe("| Index | Description |\n|-|-|\n"); + }); + }); + + describe("getDeficitRow", () => { + it("should return singular deficit row", () => { + const result = getDeficitRow(1); + expect(result).toBe("|| ⚠️ 1 occurrence omitted. See the build log.|\n"); + }); + + it("should return plural deficit row", () => { + const result = getDeficitRow(5); + expect(result).toBe("|| ⚠️ 5 occurrences omitted. See the build log.|\n"); + }); + }); + + describe("rowToString", () => { + it("should convert row to markdown string", () => { + const row: BreakingChangeMdRow = { + index: 1, + description: "Test description with
tags", + msg: { + type: "Result", + id: "test-id", + level: "Error", + message: "Test message", + time: new Date("2023-01-01"), + paths: [], + } as ResultMessageRecord, + }; + + const result = rowToString(row); + expect(result).toBe("| 1 | Test description with
tags |\n"); + }); + }); + + describe("integration with complex messages", () => { + it("should handle Result message with multiple paths", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Result", + id: "test-id", + level: "Error", + message: "Property 'test' was removed", + time: new Date("2023-01-01"), + paths: [ + { + tag: "New", + path: "https://github.com/owner/repo/blob/main/specification/storage/resource-manager/Microsoft.Storage/stable/2021-01-01/storage.json#L100:5", + jsonPath: "$.definitions.StorageAccount.properties.test", + }, + { + tag: "Old", + path: "https://github.com/owner/repo/blob/old/specification/storage/resource-manager/Microsoft.Storage/stable/2021-01-01/storage.json#L95:3", + jsonPath: "$.definitions.StorageAccount.properties.test.old", + }, + ], + } as ResultMessageRecord, + ]; + + const result = createBreakingChangeMdRows(msgs); + const description = result[0].description; + + expect(description).toContain("Property 'test' was removed"); + expect(description).toContain("New: [Microsoft.Storage/stable/2021-01-01/storage.json"); + expect(description).toContain("Old: [Microsoft.Storage/stable/2021-01-01/storage.json"); + expect(description).toContain("$.definitions.StorageAccount.properties.test"); + }); + + it("should handle message with newlines and tabs", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Result", + id: "test-id", + level: "Error", + message: "Test message\nwith newlines\tand tabs\r", + time: new Date("2023-01-01"), + paths: [], + } as ResultMessageRecord, + ]; + + const result = createBreakingChangeMdRows(msgs); + expect(result[0].description).toBe("Test message with newlines and tabs
"); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/utils/markdown-report.test.ts b/eng/tools/openapi-diff-runner/test/utils/markdown-report.test.ts new file mode 100644 index 000000000000..8af37b437706 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/utils/markdown-report.test.ts @@ -0,0 +1,360 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { + createBreakingChangeMdReport, + sortBreakingChangeMdReports, + getReportLength, + reportToString, + getRowCount, + BreakingChangeMdReport, +} from "../../src/utils/markdown-report.js"; +import { BrChMsgRecord, ResultMessageRecord } from "../../src/types/message.js"; +import { logMessage, LogLevel } from "../../src/log.js"; + +// Mock the log module +vi.mock("../../src/log.js", () => ({ + logMessage: vi.fn(), + LogLevel: { + Info: "Info", + Warning: "Warning", + Error: "Error", + }, +})); + +describe("markdown-report", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe("createBreakingChangeMdReport", () => { + it("should create report from Result messages", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Result", + id: "R001", + level: "Error", + message: "Test error message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com/rules/R001", + code: "TestError", + paths: [ + { + tag: "New", + path: "https://github.com/owner/repo/blob/main/specification/test.json", + jsonPath: "$.paths./test", + }, + ], + } as ResultMessageRecord, + ]; + + const result = createBreakingChangeMdReport(msgs); + + expect(result.msgs).toBe(msgs); + expect(result.type).toBe("Result"); + expect(result.level).toBe("Error"); + expect(result.id).toBe("R001"); + expect(result.rawMessage).toBe(""); + expect(result.rows).toHaveLength(1); + }); + + it("should create report from Raw messages", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Raw", + level: "Warning", + message: "Runtime warning message", + time: new Date("2023-01-01"), + 
groupName: "test-group", + extra: { details: "Additional info" }, + }, + ]; + + const result = createBreakingChangeMdReport(msgs); + + expect(result.msgs).toBe(msgs); + expect(result.type).toBe("Raw"); + expect(result.level).toBe("Warning"); + expect(result.id).toBeUndefined(); + expect(result.rawMessage).toBe("Runtime warning message"); + expect(result.rows).toHaveLength(1); + }); + + it("should validate empty message array", () => { + const msgs: BrChMsgRecord[] = []; + + expect(() => createBreakingChangeMdReport(msgs)).toThrow(); + }); + + it("should warn about mixed message types", () => { + const msgs: BrChMsgRecord[] = [ + { + type: "Result", + id: "R001", + level: "Error", + message: "Test message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com/rules/R001", + code: "TestError", + paths: [], + } as ResultMessageRecord, + { + type: "Raw", + level: "Error", + message: "Raw message", + time: new Date("2023-01-01"), + groupName: "test-group", + }, + ]; + + createBreakingChangeMdReport(msgs); + expect(logMessage).toHaveBeenCalledWith( + expect.stringContaining("Not all messages have the same type"), + LogLevel.Warn, + ); + }); + }); + + describe("sortBreakingChangeMdReports", () => { + it("should sort by type (Raw before Result)", () => { + const resultReport: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R001", + level: "Error", + message: "Result message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestError", + paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Result message |\\n"], + type: "Result", + level: "Error", + id: "R001", + rawMessage: "", + }; + + const rawReport: BreakingChangeMdReport = { + msgs: [ + { + type: "Raw", + level: "Error", + message: "Raw message", + time: new Date("2023-01-01"), + groupName: "test-group", + }, + ], + rows: ["| 1 | Raw message |\\n"], + type: "Raw", + level: "Error", + rawMessage: "Raw message", + }; + + const result = 
sortBreakingChangeMdReports([resultReport, rawReport]); + expect(result[0].type).toBe("Raw"); + expect(result[1].type).toBe("Result"); + }); + + it("should sort by level (Error before Warning before Info)", () => { + const infoReport: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R001", + level: "Info", + message: "Info message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestInfo", + paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Info message |\\n"], + type: "Result", + level: "Info", + id: "R001", + rawMessage: "", + }; + + const errorReport: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R002", + level: "Error", + message: "Error message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestError", + paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Error message |\\n"], + type: "Result", + level: "Error", + id: "R002", + rawMessage: "", + }; + + const result = sortBreakingChangeMdReports([infoReport, errorReport]); + expect(result[0].level).toBe("Error"); + expect(result[1].level).toBe("Info"); + }); + + it("should sort by ID when type and level are same", () => { + const reportB: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R002", + level: "Error", + message: "Message B", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestError", + paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Message B |\\n"], + type: "Result", + level: "Error", + id: "R002", + rawMessage: "", + }; + + const reportA: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R001", + level: "Error", + message: "Message A", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestError", + paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Message A |\\n"], + type: "Result", + level: "Error", + id: "R001", + rawMessage: "", + }; + + const result = 
sortBreakingChangeMdReports([reportB, reportA]); + expect(result[0].id).toBe("R001"); + expect(result[1].id).toBe("R002"); + }); + }); + + describe("reportToString", () => { + it("should convert report to markdown string", () => { + const report: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R001", + level: "Error", + message: "Test message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com/rules/R001", + code: "TestError", + paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Test message |\\n"], + type: "Result", + level: "Error", + id: "R001", + rawMessage: "", + }; + + const result = reportToString(report, 10); + + expect(result).toContain("## "); + expect(result).toContain("[R001 - TestError](https://docs.example.com/rules/R001)"); + expect(result).toContain("Displaying 1 out of 1 occurrences"); + expect(result).toContain("| Index | Description |"); + expect(result).toContain("| 1 | Test message |"); + }); + + it("should include deficit row when maxRowCount is exceeded", () => { + const msgs: BrChMsgRecord[] = Array.from( + { length: 5 }, + (_, i) => + ({ + type: "Result", + id: `R00${i + 1}`, + level: "Error", + message: `Test message ${i + 1}`, + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestError", + paths: [], + }) as ResultMessageRecord, + ); + + const report = createBreakingChangeMdReport(msgs); + const result = reportToString(report, 3); + + expect(result).toContain("Displaying 3 out of 5 occurrences"); + expect(result).toContain("⚠️ To view the remaining 2 occurrences"); + expect(result).toContain("⚠️ 2 occurrences omitted"); + }); + }); + + describe("getReportLength", () => { + it("should return correct length of report string", () => { + const report: BreakingChangeMdReport = { + msgs: [ + { + type: "Result", + id: "R001", + level: "Error", + message: "Test message", + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com/rules/R001", + code: "TestError", + 
paths: [], + } as ResultMessageRecord, + ], + rows: ["| 1 | Test message |\\n"], + type: "Result", + level: "Error", + id: "R001", + rawMessage: "", + }; + + const expectedString = reportToString(report, 10); + const result = getReportLength(report, 10); + + expect(result).toBe(expectedString.length); + }); + }); + + describe("getRowCount", () => { + it("should return correct number of rows", () => { + const msgs: BrChMsgRecord[] = Array.from( + { length: 3 }, + (_, i) => + ({ + type: "Result", + id: `R00${i + 1}`, + level: "Error", + message: `Test message ${i + 1}`, + time: new Date("2023-01-01"), + docUrl: "https://docs.example.com", + code: "TestError", + paths: [], + }) as ResultMessageRecord, + ); + + const report = createBreakingChangeMdReport(msgs); + const result = getRowCount(report); + + expect(result).toBe(3); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/utils/oad-message-processor.test.ts b/eng/tools/openapi-diff-runner/test/utils/oad-message-processor.test.ts new file mode 100644 index 000000000000..55580b4da7a4 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/utils/oad-message-processor.test.ts @@ -0,0 +1,508 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import fs from "fs"; +import path from "path"; +import { + convertOadMessagesToResultMessageRecords, + createOadMessageProcessor, + createMessageKey, + appendToLogFile, + appendMarkdownToLog, + processAndAppendOadMessages, + clearMessageCache, + getMessageCacheSize, + OadMessageProcessorContext, +} from "../../src/utils/oad-message-processor.js"; +import { OadMessage } from "../../src/types/oad-types.js"; +import { MessageLevel } from "../../src/types/message.js"; +import { logMessage } from "../../src/log.js"; + +// Mock dependencies +vi.mock("fs"); +vi.mock("../../src/log.js"); +vi.mock("../../src/utils/common-utils.js", () => ({ + sourceBranchHref: vi.fn( + (location: string) => 
`https://github.com/owner/repo/blob/main/${location}`, + ), + specificBranchHref: vi.fn( + (location: string, branch: string) => + `https://github.com/owner/repo/blob/${branch}/${location}`, + ), +})); +vi.mock("../../src/types/breaking-change.js", () => ({ + logFileName: "breaking-change.log", +})); +vi.mock("../../src/command-helpers.js", () => ({ + defaultBreakingChangeBaseBranch: "main", +})); + +describe("oad-message-processor", () => { + const mockAppendFileSync = vi.mocked(fs.appendFileSync); + const mockLogMessage = vi.mocked(logMessage); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.resetAllMocks(); + }); + + describe("convertOadMessagesToResultMessageRecords", () => { + const createMockOadMessage = (overrides: Partial = {}): OadMessage => ({ + type: "Error", + code: "RemovedProperty", + message: "Test error message", + id: "test-id", + docUrl: "https://docs.example.com/rules/RemovedProperty", + mode: "test", + groupName: "stable", + new: { + location: "specification/test/new.json", + path: "$.definitions.TestModel", + }, + old: { + location: "specification/test/old.json", + path: "$.definitions.TestModel", + }, + ...overrides, + }); + + it("should convert OAD messages to result message records", () => { + const oadMessages: OadMessage[] = [createMockOadMessage()]; + + const result = convertOadMessagesToResultMessageRecords(oadMessages); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual({ + type: "Result", + level: "Error", + message: "Test error message", + code: "RemovedProperty", + id: "test-id", + docUrl: "https://docs.example.com/rules/RemovedProperty", + time: expect.any(Date), + groupName: "stable", + extra: { + mode: "test", + }, + paths: [ + { + tag: "New", + path: "https://github.com/owner/repo/blob/main/specification/test/new.json", + jsonPath: "$.definitions.TestModel", + }, + { + tag: "Old", + path: 
"https://github.com/owner/repo/blob/main/specification/test/old.json", + jsonPath: "$.definitions.TestModel", + }, + ], + }); + }); + + it("should handle custom base branch name", () => { + const oadMessages: OadMessage[] = [createMockOadMessage()]; + const customBaseBranch = "feature-branch"; + + const result = convertOadMessagesToResultMessageRecords(oadMessages, customBaseBranch); + + expect(result[0].paths[1].path).toBe( + "https://github.com/owner/repo/blob/feature-branch/specification/test/old.json", + ); + }); + + it("should handle messages with missing new location", () => { + const oadMessages: OadMessage[] = [ + createMockOadMessage({ + new: { location: "", path: "" }, + }), + ]; + + const result = convertOadMessagesToResultMessageRecords(oadMessages); + + expect(result[0].paths).toHaveLength(1); + expect(result[0].paths[0].tag).toBe("Old"); + }); + + it("should handle messages with missing old location", () => { + const oadMessages: OadMessage[] = [ + createMockOadMessage({ + old: { location: "", path: "" }, + }), + ]; + + const result = convertOadMessagesToResultMessageRecords(oadMessages); + + expect(result[0].paths).toHaveLength(1); + expect(result[0].paths[0].tag).toBe("New"); + }); + + it("should handle messages with no locations", () => { + const oadMessages: OadMessage[] = [ + createMockOadMessage({ + new: { location: "", path: "" }, + old: { location: "", path: "" }, + }), + ]; + + const result = convertOadMessagesToResultMessageRecords(oadMessages); + + expect(result[0].paths).toHaveLength(0); + }); + + it("should handle different message levels", () => { + const levels: MessageLevel[] = ["Error", "Warning", "Info"]; + const oadMessages: OadMessage[] = levels.map((level) => + createMockOadMessage({ type: level }), + ); + + const result = convertOadMessagesToResultMessageRecords(oadMessages); + + expect(result).toHaveLength(3); + result.forEach((msg, index) => { + expect(msg.level).toBe(levels[index]); + }); + }); + 
}); + + describe("createOadMessageProcessor", () => { + it("should create processor context with default folder", () => { + const context = createOadMessageProcessor("", "https://github.com/owner/repo/pull/123"); + + expect(context.logFilePath).toBe(path.join(".", "breaking-change.log")); + expect(context.prUrl).toBe("https://github.com/owner/repo/pull/123"); + expect(context.messageCache).toEqual([]); + }); + + it("should create processor context with custom folder", () => { + const context = createOadMessageProcessor( + "/custom/path", + "https://github.com/owner/repo/pull/456", + ); + + expect(context.logFilePath).toBe(path.join("/custom/path", "breaking-change.log")); + expect(context.prUrl).toBe("https://github.com/owner/repo/pull/456"); + expect(context.messageCache).toEqual([]); + }); + }); + + describe("createMessageKey", () => { + const createMockOadMessage = (overrides: Partial = {}): OadMessage => ({ + type: "Error", + code: "RemovedProperty", + message: "Test error message", + mode: "test", + groupName: "stable", + id: "test-id", + docUrl: "https://docs.example.com/rules/RemovedProperty", + new: { + location: "specification/test/new.json", + path: "$.definitions.TestModel", + }, + old: { + location: "specification/test/old.json", + path: "$.definitions.TestModel", + }, + ...overrides, + }); + + it("should create consistent keys for identical messages", () => { + const message1 = createMockOadMessage(); + const message2 = createMockOadMessage(); + + const key1 = createMessageKey(message1); + const key2 = createMessageKey(message2); + + expect(key1).toBe(key2); + }); + + it("should create different keys for different messages", () => { + const message1 = createMockOadMessage(); + const message2 = createMockOadMessage({ code: "AddedPropertyInResponse" }); + + const key1 = createMessageKey(message1); + const key2 = createMessageKey(message2); + + expect(key1).not.toBe(key2); + }); + + it("should create 
different keys for different locations", () => { + const message1 = createMockOadMessage(); + const message2 = createMockOadMessage({ + new: { location: "specification/test/different.json", path: "$.definitions.TestModel" }, + }); + + const key1 = createMessageKey(message1); + const key2 = createMessageKey(message2); + + expect(key1).not.toBe(key2); + }); + + it("should create different keys for different paths", () => { + const message1 = createMockOadMessage(); + const message2 = createMockOadMessage({ + new: { location: "specification/test/new.json", path: "$.definitions.DifferentModel" }, + }); + + const key1 = createMessageKey(message1); + const key2 = createMessageKey(message2); + + expect(key1).not.toBe(key2); + }); + }); + + describe("appendToLogFile", () => { + it("should append message to log file with newline", () => { + const logFilePath = "/path/to/log.txt"; + const message = "Test log message"; + + appendToLogFile(logFilePath, message); + + expect(mockAppendFileSync).toHaveBeenCalledTimes(2); + expect(mockAppendFileSync).toHaveBeenNthCalledWith(1, logFilePath, message); + expect(mockAppendFileSync).toHaveBeenNthCalledWith(2, logFilePath, "\n"); + expect(mockLogMessage).toHaveBeenCalledWith("oad-message-processor.appendMsg: " + message); + }); + }); + + describe("appendMarkdownToLog", () => { + it("should append markdown with default error level", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }; + const errorMsg = "Test markdown error"; + + appendMarkdownToLog(context, errorMsg); + + expect(mockAppendFileSync).toHaveBeenCalledTimes(2); + const appendedContent = mockAppendFileSync.mock.calls[0][1] as string; + const parsedContent = JSON.parse(appendedContent); + + expect(parsedContent).toEqual({ + type: "Markdown", + mode: "append", + level: "Error", + message: errorMsg, + time: expect.any(String), + }); + }); + + 
it("should append markdown with custom level", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }; + const errorMsg = "Test markdown warning"; + const levelType = "Warning"; + + appendMarkdownToLog(context, errorMsg, levelType); + + const appendedContent = mockAppendFileSync.mock.calls[0][1] as string; + const parsedContent = JSON.parse(appendedContent); + + expect(parsedContent.level).toBe(levelType); + }); + }); + + describe("processAndAppendOadMessages", () => { + const createMockOadMessage = (overrides: Partial = {}): OadMessage => ({ + type: "Error", + code: "RemovedProperty", + message: "Test error message", + id: "test-id", + docUrl: "https://docs.example.com/rules/RemovedProperty", + mode: "test", + groupName: "stable", + new: { + location: "specification/test/new.json", + path: "$.definitions.TestModel", + }, + old: { + location: "specification/test/old.json", + path: "$.definitions.TestModel", + }, + ...overrides, + }); + + it("should process and append new messages", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }; + const oadMessages = [createMockOadMessage()]; + const baseBranch = "main"; + + const result = processAndAppendOadMessages(context, oadMessages, baseBranch); + + expect(result).toHaveLength(1); + expect(context.messageCache).toHaveLength(1); + expect(mockAppendFileSync).toHaveBeenCalledTimes(2); + expect(mockLogMessage).toHaveBeenCalledWith( + expect.stringContaining("oad-message-processor.processAndAppendOadMessages"), + ); + }); + + it("should deduplicate messages", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }; + + const baseMessage = 
createMockOadMessage(); + const differentMessage = createMockOadMessage({ code: "AddedPropertyInResponse" }); + const oadMessages = [baseMessage, differentMessage]; + const baseBranch = "main"; + + const result = processAndAppendOadMessages(context, oadMessages, baseBranch); + + expect(result).toHaveLength(2); // Both unique messages + expect(context.messageCache).toHaveLength(2); + expect(mockLogMessage).toHaveBeenCalledWith( + expect.stringContaining("duplicateOadMessages.length: 0"), + ); + }); + + it("should test actual deduplication with cache", () => { + // First, add a message to the cache + const existingMessage = createMockOadMessage(); + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [existingMessage], + }; + + // Create new messages including one that matches the cached message + const sameMessage = { ...existingMessage }; + const differentMessage = createMockOadMessage({ code: "AddedPropertyInResponse" }); + const oadMessages = [sameMessage, differentMessage]; + const baseBranch = "main"; + + const result = processAndAppendOadMessages(context, oadMessages, baseBranch); + + expect(result).toHaveLength(1); // Only the different message should be processed + expect(context.messageCache).toHaveLength(2); // Original + new different message + expect(mockLogMessage).toHaveBeenCalledWith( + expect.stringContaining("duplicateOadMessages.length: 1"), + ); + }); + + it("should not process messages already in cache", () => { + const existingMessage = createMockOadMessage(); + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [existingMessage], + }; + const oadMessages = [createMockOadMessage()]; // Same as existing + const baseBranch = "main"; + + const result = processAndAppendOadMessages(context, oadMessages, baseBranch); + + 
expect(result).toHaveLength(0); // No new messages + expect(context.messageCache).toHaveLength(1); // Cache unchanged + expect(mockLogMessage).toHaveBeenCalledWith( + expect.stringContaining("duplicateOadMessages.length: 1"), + ); + }); + + it("should log processing statistics", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }; + + const message1 = createMockOadMessage(); + const message2 = createMockOadMessage({ code: "AddedPropertyInResponse" }); + const oadMessages = [message1, message2]; + const baseBranch = "develop"; + + processAndAppendOadMessages(context, oadMessages, baseBranch); + + expect(mockLogMessage).toHaveBeenCalledWith( + "oad-message-processor.processAndAppendOadMessages: PR:https://github.com/owner/repo/pull/123, " + + "baseBranch: develop, oadMessages.length: 2, duplicateOadMessages.length: 0, messageCache.length: 0.", + ); + }); + }); + + describe("clearMessageCache", () => { + it("should clear the message cache", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [ + { + type: "Error", + code: "RemovedProperty", + message: "Test", + id: "test-id", + docUrl: "https://docs.example.com/rules/RemovedProperty", + mode: "test", + groupName: "stable", + new: { location: "new.json", path: "$.path" }, + old: { location: "old.json", path: "$.path" }, + }, + ], + }; + + clearMessageCache(context); + + expect(context.messageCache).toHaveLength(0); + }); + }); + + describe("getMessageCacheSize", () => { + it("should return the size of message cache", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [ + { + type: "Error", + code: "RemovedProperty", + message: "Test 1", + id: 
"test-id-1", + docUrl: "https://docs.example.com/rules/RemovedProperty", + mode: "test", + groupName: "stable", + new: { location: "new.json", path: "$.path" }, + old: { location: "old.json", path: "$.path" }, + }, + { + type: "Warning", + code: "AddedPropertyInResponse", + message: "Test 2", + id: "test-id-2", + docUrl: "https://docs.example.com/rules/AddedPropertyInResponse", + mode: "test", + groupName: "preview", + new: { location: "new2.json", path: "$.path2" }, + old: { location: "old2.json", path: "$.path2" }, + }, + ], + }; + + const size = getMessageCacheSize(context); + + expect(size).toBe(2); + }); + + it("should return 0 for empty cache", () => { + const context: OadMessageProcessorContext = { + logFilePath: "/path/to/log.txt", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }; + + const size = getMessageCacheSize(context); + + expect(size).toBe(0); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/test/utils/pull-request.test.ts b/eng/tools/openapi-diff-runner/test/utils/pull-request.test.ts new file mode 100644 index 000000000000..72f52fad6e33 --- /dev/null +++ b/eng/tools/openapi-diff-runner/test/utils/pull-request.test.ts @@ -0,0 +1,404 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { existsSync, mkdirSync } from "node:fs"; +import path from "node:path"; +import { simpleGit } from "simple-git"; +import { createPullRequestProperties } from "../../src/utils/pull-request.js"; +import { Context } from "../../src/types/breaking-change.js"; + +// Mock dependencies +vi.mock("node:fs"); +vi.mock("node:path"); +vi.mock("simple-git"); + +describe("pull-request", () => { + const mockExistsSync = vi.mocked(existsSync); + const mockMkdirSync = vi.mocked(mkdirSync); + const mockPath = vi.mocked(path); + const mockSimpleGit = vi.mocked(simpleGit); + + // Mock git repository instance + const mockGitRepo = { + branch: vi.fn(), + init: vi.fn(), + getRemotes: vi.fn(), + addRemote: 
vi.fn(), + pull: vi.fn(), + fetch: vi.fn(), + checkout: vi.fn(), + }; + + beforeEach(() => { + vi.clearAllMocks(); + + // Setup default mocks + mockSimpleGit.mockReturnValue(mockGitRepo as any); + mockPath.resolve.mockImplementation((...paths) => paths.join("/")); + mockPath.join.mockImplementation((...paths) => paths.join("/")); + + // Mock console.log to avoid test output noise + vi.spyOn(console, "log").mockImplementation(() => {}); + }); + + afterEach(() => { + vi.resetAllMocks(); + }); + + describe("createPullRequestProperties", () => { + const createMockContext = (overrides: Partial = {}): Context => ({ + localSpecRepoPath: "/path/to/spec/repo", + workingFolder: "/path/to/working/folder", + logFileFolder: "/path/to/log/folder", + swaggerDirs: ["specification/service/path"], + baseBranch: "main", + headCommit: "abc123", + runType: "SameVersion", + checkName: "BreakingChange", + repo: "owner/repo", + prNumber: "123", + prSourceBranch: "feature-branch", + prTargetBranch: "main", + oadMessageProcessorContext: { + logFilePath: "/path/to/log/file.log", + prUrl: "https://github.com/owner/repo/pull/123", + messageCache: [], + }, + prUrl: "https://github.com/owner/repo/pull/123", + ...overrides, + }); + + it("should return undefined when baseBranch is undefined", async () => { + const context = createMockContext({ baseBranch: undefined }); + + const result = await createPullRequestProperties(context, "test-prefix"); + + expect(result).toBeUndefined(); + }); + + it("should create pull request properties successfully", async () => { + const context = createMockContext(); + const prefix = "test-prefix"; + + // Mock branch listing + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + + // Mock directory operations + mockExistsSync.mockReturnValue(false); + mockPath.resolve.mockReturnValue("/resolved/working/dir"); + + // Mock git operations + 
mockGitRepo.getRemotes.mockResolvedValue([]); + mockGitRepo.init.mockResolvedValue(undefined); + mockGitRepo.addRemote.mockResolvedValue(undefined); + mockGitRepo.pull.mockResolvedValue(undefined); + mockGitRepo.fetch.mockResolvedValue(undefined); + mockGitRepo.checkout.mockResolvedValue(undefined); + + const result = await createPullRequestProperties(context, prefix); + + expect(result).toBeDefined(); + expect(result!.baseBranch).toBe("main"); + expect(result!.targetBranch).toBe("main"); + expect(result!.sourceBranch).toBe("source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"); + expect(result!.workingDir).toBe("/resolved/working/dir"); + expect(result!.currentBranch).toBe("main"); + expect(typeof result!.checkout).toBe("function"); + }); + + it("should create source branch if it doesn't exist", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main"], // Source branch doesn't exist + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix"); + + expect(mockGitRepo.branch).toHaveBeenCalledWith([ + "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8", + ]); + }); + + it("should create base branch if it doesn't exist and skipInitializeBase is false", async () => { + const context = createMockContext({ baseBranch: "develop" }); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], // Base branch doesn't exist + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix", false); + + expect(mockGitRepo.branch).toHaveBeenCalledWith(["develop", "remotes/origin/develop"]); + }); + + it("should not create base branch if skipInitializeBase is true", async () => { + const context = createMockContext({ baseBranch: "develop" }); + + 
mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix", true); + + expect(mockGitRepo.branch).not.toHaveBeenCalledWith(["develop", "remotes/origin/develop"]); + expect(mockGitRepo.fetch).not.toHaveBeenCalledWith("origin", "develop"); + }); + + it("should create target branch if it doesn't exist", async () => { + const context = createMockContext({ prTargetBranch: "feature-branch" }); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], // Target branch doesn't exist + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix"); + + expect(mockGitRepo.branch).toHaveBeenCalledWith([ + "feature-branch", + "remotes/origin/feature-branch", + ]); + }); + + it("should create working directory if it doesn't exist", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(false); // Directory doesn't exist + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix"); + + expect(mockMkdirSync).toHaveBeenCalledWith(expect.any(String)); + }); + + it("should add origin remote if it doesn't exist", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([]); // No remotes exist + + await createPullRequestProperties(context, "test-prefix"); + + expect(mockGitRepo.addRemote).toHaveBeenCalledWith("origin", 
context.localSpecRepoPath); + }); + + it("should not add origin remote if it already exists", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); // Origin exists + + await createPullRequestProperties(context, "test-prefix"); + + expect(mockGitRepo.addRemote).not.toHaveBeenCalled(); + }); + + it("should handle getRemotes error and try to add origin anyway", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockRejectedValue(new Error("Failed to get remotes")); + mockGitRepo.addRemote.mockResolvedValue(undefined); + + await createPullRequestProperties(context, "test-prefix"); + + expect(mockGitRepo.addRemote).toHaveBeenCalledWith("origin", context.localSpecRepoPath); + }); + + it("should ignore 'remote origin already exists' error when adding remote", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockRejectedValue(new Error("Failed to get remotes")); + mockGitRepo.addRemote.mockRejectedValue(new Error("fatal: remote origin already exists")); + + // Should not throw an error + await expect(createPullRequestProperties(context, "test-prefix")).resolves.toBeDefined(); + }); + + it("should throw error if addRemote fails with other error", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockRejectedValue(new 
Error("Failed to get remotes")); + mockGitRepo.addRemote.mockRejectedValue(new Error("Some other error")); + + await expect(createPullRequestProperties(context, "test-prefix")).rejects.toThrow( + "Some other error", + ); + }); + + it("should perform git operations in correct order", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix"); + + // Verify order of operations + expect(mockGitRepo.init).toHaveBeenCalled(); + expect(mockGitRepo.pull).toHaveBeenCalledWith("origin", "main"); + expect(mockGitRepo.fetch).toHaveBeenCalledWith( + "origin", + "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8", + ); + expect(mockGitRepo.fetch).toHaveBeenCalledWith("origin", "main"); + expect(mockGitRepo.checkout).toHaveBeenCalledWith("main"); + }); + + describe("checkout function", () => { + it("should checkout to different branch", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + const result = await createPullRequestProperties(context, "test-prefix"); + + expect(result).toBeDefined(); + expect(result!.currentBranch).toBe("main"); + + // Test checkout function + await result!.checkout("feature-branch"); + + expect(mockGitRepo.checkout).toHaveBeenCalledWith(["feature-branch"]); + expect(result!.currentBranch).toBe("feature-branch"); + }); + + it("should not checkout if already on the target branch", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + 
mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + const result = await createPullRequestProperties(context, "test-prefix"); + + expect(result).toBeDefined(); + expect(result!.currentBranch).toBe("main"); + + // Clear previous checkout calls + mockGitRepo.checkout.mockClear(); + + // Test checkout to same branch + await result!.checkout("main"); + + expect(mockGitRepo.checkout).not.toHaveBeenCalled(); + expect(result!.currentBranch).toBe("main"); + }); + + it("should update currentBranch after successful checkout", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + const result = await createPullRequestProperties(context, "test-prefix"); + + expect(result).toBeDefined(); + expect(result!.currentBranch).toBe("main"); + + // Test multiple checkouts + await result!.checkout("feature-branch"); + expect(result!.currentBranch).toBe("feature-branch"); + + await result!.checkout("develop"); + expect(result!.currentBranch).toBe("develop"); + + // Verify checkout was called for each different branch + expect(mockGitRepo.checkout).toHaveBeenCalledTimes(3); // 1 initial + 2 from tests + }); + }); + + it("should use correct working directory path", async () => { + const context = createMockContext(); + const prefix = "custom-prefix"; + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + // Mock path operations + vi.spyOn(process, "cwd").mockReturnValue("/current/working/dir"); + mockPath.join.mockReturnValue("../custom-prefix-c93b354fd9c14905bb574a8834c4d69b"); + mockPath.resolve.mockReturnValue("/resolved/working/dir"); + + const 
result = await createPullRequestProperties(context, prefix); + + expect(mockPath.join).toHaveBeenCalledWith( + "/current/working/dir", + "..", + "custom-prefix-c93b354fd9c14905bb574a8834c4d69b", + ); + expect(mockPath.resolve).toHaveBeenCalledWith( + "../custom-prefix-c93b354fd9c14905bb574a8834c4d69b", + ); + expect(result!.workingDir).toBe("/resolved/working/dir"); + }); + + it("should use correct git options", async () => { + const context = createMockContext(); + + mockGitRepo.branch.mockResolvedValue({ + all: ["main", "source-b6791c5f-e0a5-49b1-9175-d7fd3e341cb8"], + }); + mockExistsSync.mockReturnValue(true); + mockGitRepo.getRemotes.mockResolvedValue([{ name: "origin" }]); + + await createPullRequestProperties(context, "test-prefix"); + + // Verify simpleGit was called with correct options + expect(mockSimpleGit).toHaveBeenCalledWith({ + baseDir: context.localSpecRepoPath, + binary: "git", + maxConcurrentProcesses: 1, + }); + + expect(mockSimpleGit).toHaveBeenCalledWith({ + baseDir: expect.any(String), // working directory + binary: "git", + maxConcurrentProcesses: 1, + }); + }); + }); +}); diff --git a/eng/tools/openapi-diff-runner/tsconfig.json b/eng/tools/openapi-diff-runner/tsconfig.json new file mode 100644 index 000000000000..58ee0b661a2b --- /dev/null +++ b/eng/tools/openapi-diff-runner/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "allowJs": true, + "resolveJsonModule": true, + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], +} diff --git a/eng/tools/package.json b/eng/tools/package.json index b57ea734b786..981da6b3bb5e 100644 --- a/eng/tools/package.json +++ b/eng/tools/package.json @@ -1,14 +1,16 @@ { "name": "azure-rest-api-specs-eng-tools", "devDependencies": { - "@azure-tools/specs-model": "file:specs-model", + "@azure-tools/lint-diff": "file:lint-diff", + "@azure-tools/oav-runner": "file:oav-runner", + "@azure-tools/sdk-suppressions": 
"file:sdk-suppressions", + "@azure-tools/openapi-diff-runner": "file:openapi-diff-runner", + "@azure-tools/spec-gen-sdk-runner": "file:spec-gen-sdk-runner", "@azure-tools/suppressions": "file:suppressions", "@azure-tools/tsp-client-tests": "file:tsp-client-tests", + "@azure-tools/typespec-migration-validation": "file:typespec-migration-validation", "@azure-tools/typespec-requirement": "file:typespec-requirement", - "@azure-tools/typespec-validation": "file:typespec-validation", - "@azure-tools/sdk-suppressions": "file:sdk-suppressions", - "@azure-tools/spec-gen-sdk-runner": "file:spec-gen-sdk-runner", - "@azure-tools/lint-diff": "file:lint-diff" + "@azure-tools/typespec-validation": "file:typespec-validation" }, "scripts": { "build": "tsc --build", diff --git a/eng/tools/sdk-suppressions/package.json b/eng/tools/sdk-suppressions/package.json index cbefd74bd76e..2d27648ca1d9 100644 --- a/eng/tools/sdk-suppressions/package.json +++ b/eng/tools/sdk-suppressions/package.json @@ -9,6 +9,9 @@ }, "scripts": { "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", "test": "vitest", "test:ci": "vitest run --coverage --reporter=verbose" }, @@ -16,14 +19,19 @@ "node": ">=20.0.0" }, "dependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", "ajv": "^8.17.1", + "debug": "^4.4.0", "lodash": "^4.17.20", + "simple-git": "^3.27.0", "yaml": "^2.4.2" }, "devDependencies": { + "@types/debug": "^4.1.12", "@types/lodash": "^4.14.161", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" } diff --git a/eng/tools/sdk-suppressions/src/common.ts b/eng/tools/sdk-suppressions/src/common.ts index b13781e4e20c..8fb57b0d62eb 100644 --- a/eng/tools/sdk-suppressions/src/common.ts +++ b/eng/tools/sdk-suppressions/src/common.ts @@ -1,19 +1,32 @@ import { parse as yamlParse } from "yaml"; +import { getChangedFiles } from "@azure-tools/specs-shared/changed-files"; -import { exec } from "child_process"; -import { promisify } from "util"; +/** + * @returns {string[]} + * @description get the changed files in the current PR + */ +export async function getSDKSuppressionsChangedFiles() { + const changedFiles = await getChangedFiles(); + const sdkSuppressionsFiles = changedFiles.filter((file) => + file.endsWith("sdk-suppressions.yaml"), + ); + return sdkSuppressionsFiles; +} /** - * @param yamlContent + * @param yamlContent * @returns {result: string | object | undefined | null, message: string} * special return * if the content is empty, return {result: null, message: string * if the file parse error, return {result: undefined, message: string */ -export function parseYamlContent(yamlContent: string, path: string): { +export function parseYamlContent( + yamlContent: string, + path: string, +): { result: string | object | undefined | null; message: string; -}{ +} { let content = undefined; // if yaml file is not a valid yaml, catch error and return undefined try { @@ -22,42 +35,22 @@ export function 
parseYamlContent(yamlContent: string, path: string): { console.error(`The file parsing failed in the ${path}. Details: ${error}`); return { result: content, - message: `The file parsing failed in the ${path}. Details: ${error}` - };; + message: `The file parsing failed in the ${path}. Details: ${error}`, + }; } - + // if yaml file is empty, run yaml.safeload success but get undefined // to identify whether it is empty return null to distinguish. if (!content) { - console.info(`The file in the ${path} has been successfully parsed, but it is an empty file.`) + console.info(`The file in the ${path} has been successfully parsed, but it is an empty file.`); return { result: null, - message: `The file in the ${path} has been successfully parsed, but it is an empty file.` - };; + message: `The file in the ${path} has been successfully parsed, but it is an empty file.`, + }; } - + return { result: content, - message: 'The file has been successfully parsed.' + message: "The file has been successfully parsed.", }; - -} - -// Promisify the exec function -const execAsync = promisify(exec); - -export async function runGitCommand(command: string): Promise { - try { - const { stdout, stderr } = await execAsync(command); - - if (stderr) { - console.error("Error Output:", stderr); - // throw new Error(stderr); - } - - return stdout.trim(); - } catch (error:any) { - console.error("Error details:", error.stderr || error); - throw error; - } } diff --git a/eng/tools/sdk-suppressions/src/index.ts b/eng/tools/sdk-suppressions/src/index.ts index 5100ec2104d8..3f3635e82695 100644 --- a/eng/tools/sdk-suppressions/src/index.ts +++ b/eng/tools/sdk-suppressions/src/index.ts @@ -1,39 +1,38 @@ - import { exit } from "process"; import { updateSdkSuppressionsLabels } from "./updateSdkSuppressionsLabel.js"; function getArgsError(args: string[]): string { return ( - "Get args lengths: " + args.length + "\n" + - "Details: " + args.join(', ') + "\n" + - "Usage: node 
eng/tools/sdk-suppressions/cmd/sdk-suppressions-label.js baseCommitHash headCommitHash changeFiles prLabels\n" + + "Get args lengths: " + + args.length + + "\n" + + "Details: " + + args.join(", ") + + "\n" + + "Usage: node eng/tools/sdk-suppressions/cmd/sdk-suppressions-label.js baseCommitHash headCommitHash prLabels\n" + "Returns: {labelsToAdd: [label1, label2],labelsToRemove: [lable3, label4]}\n" + "Parameters:\n" + " baseCommitHash: The base commit hash. Example: HEAD^ \n" + " headCommitHash: The head commit hash. Example: HEAD \n" + - " changeFiles: The changed files. Example: 'specification/workloads/Workloads.Operations.Management/sdk-suppressions.yaml specification/workloads/Workloads.Operations.Management/main.tsp'\n" + - " prLabels: The PR has added labels. Example: '['BreakingChange-Go-Sdk-Suppression', 'BreakingChange-Python-Sdk-Suppression']'\n" + " prLabels: All pull reqeuest labels have been added, including breaking-language-sdk-suppression. Example: '['BreakingChange-Go-Sdk-Suppression', 'BreakingChange-Python-Sdk-Suppression']'\n" ); } export async function main() { const args: string[] = process.argv.slice(2); - if (args.length === 4) { + if (args.length === 3) { const baseCommitHash: string = args[0]; const headCommitHash: string = args[1]; - const changeFiles: string = args[2]; - const lables: string = args[3]; + const lables: string = args[2]; const outputFile = process.env.OUTPUT_FILE as string; - const changedLabels: {labelsToAdd: String[], labelsToRemove: String[]} = await updateSdkSuppressionsLabels(lables, changeFiles, baseCommitHash, headCommitHash, outputFile); + const changedLabels: { labelsToAdd: String[]; labelsToRemove: String[] } = + await updateSdkSuppressionsLabels(baseCommitHash, headCommitHash, lables, outputFile); console.log(JSON.stringify(changedLabels)); exit(0); } else { console.error(getArgsError(args)); exit(1); } - } - -export { updateSdkSuppressionsLabels }; - +export { updateSdkSuppressionsLabels }; diff --git 
a/eng/tools/sdk-suppressions/src/sdk.ts b/eng/tools/sdk-suppressions/src/sdk.ts deleted file mode 100644 index 8d1e3ec2452d..000000000000 --- a/eng/tools/sdk-suppressions/src/sdk.ts +++ /dev/null @@ -1,59 +0,0 @@ -/** - * This file is the single source of truth for the labels used by the SDK generation tooling - * in the Azure/azure-rest-api-specs and Azure/azure-rest-api-specs-pr repositories. - * - * For additional context, see: - * - https://gist.github.com/raych1/353949d19371b69fb82a10dd70032a51 - * - https://github.com/Azure/azure-sdk-tools/issues/6327 - * - https://microsoftapc-my.sharepoint.com/:w:/g/personal/raychen_microsoft_com/EbOAA9SkhQhGlgxtf7mc0kUB-25bFue0EFbXKXS3TFLTQA - */ -export type SdkName = - | "azure-sdk-for-go" - | "azure-sdk-for-java" - | "azure-sdk-for-js" - | "azure-sdk-for-net" - | "azure-sdk-for-python" - -export const sdkLabels: { - [sdkName in SdkName]: { - breakingChange: string | undefined; - breakingChangeApproved: string | undefined; - breakingChangeSuppression: string | undefined; - breakingChangeSuppressionApproved: string | undefined; - }; -} = { - "azure-sdk-for-go": { - breakingChange: "BreakingChange-Go-Sdk", - breakingChangeApproved: "BreakingChange-Go-Sdk-Approved", - breakingChangeSuppression: "BreakingChange-Go-Sdk-Suppression", - breakingChangeSuppressionApproved: - "BreakingChange-Go-Sdk-Suppression-Approved", - }, - "azure-sdk-for-java": { - breakingChange: "BreakingChange-Java-Sdk", - breakingChangeApproved: "BreakingChange-Java-Sdk-Approved", - breakingChangeSuppression: "BreakingChange-Java-Sdk-Suppression", - breakingChangeSuppressionApproved: - "BreakingChange-Java-Sdk-Suppression-Approved" - }, - "azure-sdk-for-js": { - breakingChange: "BreakingChange-JavaScript-Sdk", - breakingChangeApproved: "BreakingChange-JavaScript-Sdk-Approved", - breakingChangeSuppression: "BreakingChange-JavaScript-Sdk-Suppression", - breakingChangeSuppressionApproved: - 
"BreakingChange-JavaScript-Sdk-Suppression-Approved" - }, - "azure-sdk-for-net": { - breakingChange: undefined, - breakingChangeApproved: undefined, - breakingChangeSuppression: undefined, - breakingChangeSuppressionApproved: undefined - }, - "azure-sdk-for-python": { - breakingChange: "BreakingChange-Python-Sdk", - breakingChangeApproved: "BreakingChange-Python-Sdk-Approved", - breakingChangeSuppression: "BreakingChange-Python-Sdk-Suppression", - breakingChangeSuppressionApproved: - "BreakingChange-Python-Sdk-Suppression-Approved" - } -}; diff --git a/eng/tools/sdk-suppressions/src/sdkSuppressions.ts b/eng/tools/sdk-suppressions/src/sdkSuppressions.ts index 3a379bba78d2..b67e3444c934 100644 --- a/eng/tools/sdk-suppressions/src/sdkSuppressions.ts +++ b/eng/tools/sdk-suppressions/src/sdkSuppressions.ts @@ -5,7 +5,7 @@ */ import { Ajv } from "ajv"; -import { SdkName, sdkLabels } from "./sdk.js"; +import { SdkName, sdkLabels } from "@azure-tools/specs-shared/sdk-types"; export const sdkSuppressionsFileName = "sdk-suppressions.yaml"; @@ -22,9 +22,15 @@ export type SdkPackageSuppressionsEntry = { "breaking-changes": string[]; }; -function exitWithError(error: string): never { - console.error("Error:", error); - process.exit(1); +function errorResult(error: string): { + result: boolean; + message: string; +} { + console.error("Error:", error); + return { + result: false, + message: error, + }; } export function validateSdkSuppressionsFile( @@ -34,11 +40,13 @@ export function validateSdkSuppressionsFile( message: string; } { if (suppressionContent === null) { - exitWithError("This suppression file is a empty file"); + return errorResult("This suppression file is a empty file"); } if (!suppressionContent) { - exitWithError("This suppression file is not a valid yaml. Refer to https://aka.ms/azsdk/sdk-suppression for more information."); + return errorResult( + "This suppression file is not a valid yaml. 
Refer to https://aka.ms/azsdk/sdk-suppression for more information.", + ); } const suppressionFileSchema = { @@ -80,6 +88,9 @@ export function validateSdkSuppressionsFile( message: "This suppression file is a valid yaml.", }; } else { - exitWithError("This suppression file is a valid yaml but the schema is wrong: " + suppressionAjv.errorsText(suppressionAjvCompile.errors, { separator: "\n" })); + return errorResult( + "This suppression file is a valid yaml but the schema is wrong: " + + suppressionAjv.errorsText(suppressionAjvCompile.errors, { separator: "\n" }), + ); } } diff --git a/eng/tools/sdk-suppressions/src/updateSdkSuppressionsLabel.ts b/eng/tools/sdk-suppressions/src/updateSdkSuppressionsLabel.ts index da9997d6a69f..434049359f8a 100644 --- a/eng/tools/sdk-suppressions/src/updateSdkSuppressionsLabel.ts +++ b/eng/tools/sdk-suppressions/src/updateSdkSuppressionsLabel.ts @@ -1,6 +1,8 @@ import _ from "lodash"; +import debug from "debug"; import { writeFileSync } from "fs"; -import { sdkLabels, SdkName } from "./sdk.js"; +import { simpleGit } from "simple-git"; +import { sdkLabels, SdkName } from "@azure-tools/specs-shared/sdk-types"; import { SdkSuppressionsYml, SdkSuppressionsSection, @@ -8,7 +10,10 @@ import { SdkPackageSuppressionsEntry, validateSdkSuppressionsFile, } from "./sdkSuppressions.js"; -import { parseYamlContent, runGitCommand } from "./common.js"; +import { getSDKSuppressionsChangedFiles, parseYamlContent } from "./common.js"; + +// Enable simple-git debug logging to improve console output +debug.enable("simple-git"); /** * @@ -22,26 +27,37 @@ import { parseYamlContent, runGitCommand } from "./common.js"; * on the other hand that the sdkName list will return an empty array if it does not have a suppression file or if the file is blank. 
*/ export async function getSdkSuppressionsSdkNames( - prChangeFiles: string, + prChangeFiles: string[], baseCommitHash: string, - headCommitHash: string + headCommitHash: string, ): Promise { - console.log(`Will compare base commit: ${baseCommitHash} and head commit: ${headCommitHash} to get different SDK.`); - const filesChangedPaths = prChangeFiles.split(" "); - console.log(`The pr origin changed files: ${filesChangedPaths.join(", ")}`); - let suppressionFileList = filterSuppressionList(filesChangedPaths); + console.log( + `Will compare base commit: ${baseCommitHash} and head commit: ${headCommitHash} to get different SDK.`, + ); + console.log(`The pr origin changed files: ${prChangeFiles.join(", ")}`); + let suppressionFileList = filterSuppressionList(prChangeFiles); console.log(`Will compare sdk-suppression.yaml files: ${suppressionFileList.join(", ")}`); let sdkNameList: SdkName[] = []; if (suppressionFileList.length > 0) { for (const suppressionFile of suppressionFileList) { - let baseSuppressionContent = await getSdkSuppressionsFileContent(baseCommitHash, suppressionFile); - const headSuppressionContent = await getSdkSuppressionsFileContent(headCommitHash, suppressionFile); + let baseSuppressionContent = await getSdkSuppressionsFileContent( + baseCommitHash, + suppressionFile, + ); + const headSuppressionContent = await getSdkSuppressionsFileContent( + headCommitHash, + suppressionFile, + ); // if the head suppression file is present but anything is wrong like schema error with it return const validateSdkSuppressionsFileResult = validateSdkSuppressionsFile(headSuppressionContent).result; + // If the head suppression file is not valid or empty, we get _sdkNameList with []. 
if (!validateSdkSuppressionsFileResult) { - return []; + console.log( + `Returned empty SDK name list — head suppression file at ${suppressionFile}/${headCommitHash} is invalid or empty.`, + ); + continue; } // if base suppression file does not exist, set it to an empty object but has correct schema if (!baseSuppressionContent) { @@ -50,15 +66,19 @@ export async function getSdkSuppressionsSdkNames( console.log( `updateSdkSuppressionsLabels: Will compare base suppressions content:\n ` + - `${JSON.stringify(baseSuppressionContent)}\n ` + - `and head suppressions content:\n ` + - `${JSON.stringify(headSuppressionContent)} to get different SDK.`, + `${JSON.stringify(baseSuppressionContent)}\n ` + + `and head suppressions content:\n ` + + `${JSON.stringify(headSuppressionContent)} to get different SDK.`, ); - sdkNameList = getSdkNamesWithChangedSuppressions( + let _sdkNameList = getSdkNamesWithChangedSuppressions( headSuppressionContent as SdkSuppressionsYml, baseSuppressionContent as SdkSuppressionsYml, ); + console.log( + `Retrieved SDK names after comparing suppression file ${suppressionFile}: [${_sdkNameList.join(",")}].`, + ); + sdkNameList = [..._sdkNameList, ...sdkNameList]; } } @@ -70,7 +90,7 @@ export async function getSdkSuppressionsFileContent( path: string, ): Promise { try { - const suppressionFileContent = await runGitCommand(`git show ${ref}:${path}`); + const suppressionFileContent = await simpleGit().show([`${ref}:${path}`]); console.log(`Found content in ${ref}#${path}`); return parseYamlContent(suppressionFileContent, path).result; } catch (error) { @@ -84,11 +104,11 @@ function getSdksWithSuppressionsDefined(suppressions: SdkSuppressionsSection): S } /** - * - * @param headSuppressionFile - * @param baseSuppressionFile + * + * @param headSuppressionFile + * @param baseSuppressionFile * @returns SdkName[] - * + * * Analyze the suppression files across three dimensions: language, package, and breaking-change. Finally, determine the outermost sdkName. 
*/ @@ -174,10 +194,9 @@ export function getSdkNamesWithChangedSuppressions( /** * - * @param prLabels - * @param prChangeFiles * @param baseCommitHash * @param headCommitHash + * @param prLabels * @param outputFile * @returns { labelsToAdd: String[]; labelsToRemove: String[] } * This code performs two key functions: @@ -185,19 +204,20 @@ export function getSdkNamesWithChangedSuppressions( * Second, it compares the SDKNames obtained in the previous step with the existing PR labels and processes the PR labels accordingly. */ export async function updateSdkSuppressionsLabels( - prLabels: string, - prChangeFiles: string, baseCommitHash: string, headCommitHash: string, + prLabels: string, outputFile?: string, ): Promise<{ labelsToAdd: String[]; labelsToRemove: String[] }> { try { - const status = await runGitCommand("git status"); - console.log("Git status:", status); + const result = await simpleGit().raw("status"); + console.log("Git status:", result); } catch (err) { console.error("Error running git command:", err); } + const prChangeFiles = await getSDKSuppressionsChangedFiles(); + const sdkNames = await getSdkSuppressionsSdkNames(prChangeFiles, baseCommitHash, headCommitHash); console.log( @@ -209,7 +229,7 @@ export async function updateSdkSuppressionsLabels( const result = processLabels(presentLabels, sdkNames); - if(outputFile){ + if (outputFile) { writeFileSync(outputFile, JSON.stringify(result)); console.log(`😊 JSON output saved to ${outputFile}`); } @@ -218,20 +238,23 @@ export async function updateSdkSuppressionsLabels( } /** - * - * @param presentLabels - * @param sdkNames + * + * @param presentLabels + * @param sdkNames * @returns {labelsToAdd: String[], labelsToRemove: String[]} - * + * * Based on the various sdknames and existing labels, process the suppression label of PR. 
- * - * Add logic: If the breakingChangeSuppression label corresponding to an SDK in sdkNames is not in the current presentLabels list, + * + * Add logic: If the breakingChangeSuppression label corresponding to an SDK in sdkNames is not in the current presentLabels list, * add the label to labelsToAdd. - * Remove logic: If a label is in presentLabels and the corresponding breakingChangeSuppression is not in sdkNames + * Remove logic: If a label is in presentLabels and the corresponding breakingChangeSuppression is not in sdkNames * and there is no corresponding breakingChangeSuppressionApproved label, then the label is deleted. * Otherwise, the label is not deleted. */ -export function processLabels(presentLabels: string[], sdkNames: string[]): { labelsToAdd: String[]; labelsToRemove: String[] } { +export function processLabels( + presentLabels: string[], + sdkNames: string[], +): { labelsToAdd: String[]; labelsToRemove: String[] } { // The sdkNames indicates whether any suppression files have been modified. If it is empty // then check if the suppression label was previously applied and remove it if so. Otherwise, no action is needed. 
let addSdkSuppressionsLabels: string[] = []; @@ -240,33 +263,39 @@ export function processLabels(presentLabels: string[], sdkNames: string[]): { la const sdk = sdkLabels[sdkName as SdkName]; const breakingChangeSuppression = sdk.breakingChangeSuppression; // If breakingChangeSuppression is not in the existing labels, add it to labelsToAdd - if ( - breakingChangeSuppression && - !presentLabels.includes(breakingChangeSuppression) - ) { + if (breakingChangeSuppression && !presentLabels.includes(breakingChangeSuppression)) { addSdkSuppressionsLabels.push(breakingChangeSuppression); } }); - - presentLabels.forEach(label => { + + presentLabels.forEach((label) => { // Check if it is a suppression label - const suppressionLabelExists = Object.values(sdkLabels).some(sdk => { - return sdk.breakingChangeSuppression === label; + const suppressionLabelExists = Object.values(sdkLabels).some((sdk) => { + return sdk.breakingChangeSuppression === label; }); - + // If it is a suppression label if (suppressionLabelExists) { // Check if there is a corresponding approved label - const hasApprovedLabel = Object.values(sdkLabels).some(sdk => { - return sdk.breakingChangeSuppression === label && sdk.breakingChangeSuppressionApproved && presentLabels.includes(sdk.breakingChangeSuppressionApproved); + const hasApprovedLabel = Object.values(sdkLabels).some((sdk) => { + return ( + sdk.breakingChangeSuppression === label && + sdk.breakingChangeSuppressionApproved && + presentLabels.includes(sdk.breakingChangeSuppressionApproved) + ); }); // If there is no corresponding approved label and there is no suppression label in sdkNames, delete it. 
- if (!hasApprovedLabel && !sdkNames.some(sdkName => sdkLabels[sdkName as SdkName].breakingChangeSuppression === label)) { + if ( + !hasApprovedLabel && + !sdkNames.some( + (sdkName) => sdkLabels[sdkName as SdkName].breakingChangeSuppression === label, + ) + ) { removeSdkSuppressionsLabels.push(label); } } }); - + return { labelsToAdd: addSdkSuppressionsLabels, labelsToRemove: removeSdkSuppressionsLabels, diff --git a/eng/tools/sdk-suppressions/test/updateSdkSuppressionsLabel.test.ts b/eng/tools/sdk-suppressions/test/updateSdkSuppressionsLabel.test.ts index e03c3b570012..802e7ba6982f 100644 --- a/eng/tools/sdk-suppressions/test/updateSdkSuppressionsLabel.test.ts +++ b/eng/tools/sdk-suppressions/test/updateSdkSuppressionsLabel.test.ts @@ -1,5 +1,9 @@ import { vi, expect, test } from "vitest"; -import { filterSuppressionList, getSdkNamesWithChangedSuppressions, processLabels } from "../src/updateSdkSuppressionsLabel.js"; +import { + filterSuppressionList, + getSdkNamesWithChangedSuppressions, + processLabels, +} from "../src/updateSdkSuppressionsLabel.js"; import { validateSdkSuppressionsFile } from "../src/sdkSuppressions.js"; vi.mock("process", () => ({ @@ -8,153 +12,154 @@ vi.mock("process", () => ({ test("test filterSuppressionList for only resource-manager files", () => { const changeFiles = [ - "specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/datafactory.json", - "specification/datafactory/resource-manager/sdk-suppressions.yaml" + "specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/datafactory.json", + "specification/datafactory/resource-manager/sdk-suppressions.yaml", ]; const suppressionsFiles: String[] = filterSuppressionList(changeFiles); - expect(suppressionsFiles).toEqual(["specification/datafactory/resource-manager/sdk-suppressions.yaml"]); + expect(suppressionsFiles).toEqual([ + "specification/datafactory/resource-manager/sdk-suppressions.yaml", + ]); }); test("test filterSuppressionList 
for both tsp files and resource-manager files", () => { const changeFiles = [ - "specification/workloads/Workloads.Operations.Management/main.tsp", - "specification/workloads/Workloads.Operations.Management/sdk-suppressions.yaml", - "specification/workloads/resource-manager/Microsoft.Workloads/operations/preview/2023-10-01-preview/operations.json", - "specification/workloads/resource-manager/Microsoft.Workloads/operations/preview/2024-02-01-preview/operations.json", - "specification/workloads/resource-manager/Microsoft.Workloads/operations/preview/2023-12-01-preview/operations.json", - "specification/workloads/resource-manager/Microsoft.Workloads/operations/stable/2024-09-01/operations.json", - "specification/workloads/resource-manager/sdk-suppressions.yaml" + "specification/workloads/Workloads.Operations.Management/main.tsp", + "specification/workloads/Workloads.Operations.Management/sdk-suppressions.yaml", + "specification/workloads/resource-manager/Microsoft.Workloads/operations/preview/2023-10-01-preview/operations.json", + "specification/workloads/resource-manager/Microsoft.Workloads/operations/preview/2024-02-01-preview/operations.json", + "specification/workloads/resource-manager/Microsoft.Workloads/operations/preview/2023-12-01-preview/operations.json", + "specification/workloads/resource-manager/Microsoft.Workloads/operations/stable/2024-09-01/operations.json", + "specification/workloads/resource-manager/sdk-suppressions.yaml", ]; const suppressionsFiles: String[] = filterSuppressionList(changeFiles); - expect(suppressionsFiles).toEqual(["specification/workloads/Workloads.Operations.Management/sdk-suppressions.yaml"]); + expect(suppressionsFiles).toEqual([ + "specification/workloads/Workloads.Operations.Management/sdk-suppressions.yaml", + ]); }); test("test validateSdkSuppressionsFile for sdk-suppression file", () => { const suppressionContent = { - "suppressions": { - "azure-sdk-for-go": [ - { - "package": 
"sdk/resourcemanager/appcontainers/armappcontainers", - "breaking-changes": [ - "Field `EndTime`, `StartTime`, `Status`, `Template` of struct `JobExecution` has been removed" - ] - } - ], - "azure-sdk-for-python": [ - { - "package": "azure-mgmt-appcontainers", - "breaking-changes": [ - "Model BillingMeter no longer has parameter system_data" - ] - } - ] - } + suppressions: { + "azure-sdk-for-go": [ + { + package: "sdk/resourcemanager/appcontainers/armappcontainers", + "breaking-changes": [ + "Field `EndTime`, `StartTime`, `Status`, `Template` of struct `JobExecution` has been removed", + ], + }, + ], + "azure-sdk-for-python": [ + { + package: "azure-mgmt-appcontainers", + "breaking-changes": ["Model BillingMeter no longer has parameter system_data"], + }, + ], + }, }; - + const validateResult = validateSdkSuppressionsFile(suppressionContent); - expect(validateResult).toEqual({ result: true, message: 'This suppression file is a valid yaml.' }); + expect(validateResult).toEqual({ + result: true, + message: "This suppression file is a valid yaml.", + }); }); test("test validateSdkSuppressionsFile for empty file", () => { const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); - const mockProcessExit = vi.spyOn(process, "exit").mockImplementation(() => { - throw new Error("process.exit called"); // Prevent actual exit - }); - expect(() => validateSdkSuppressionsFile(null)).toThrow("process.exit called"); + expect(validateSdkSuppressionsFile(null)).toEqual({ + result: false, + message: "This suppression file is a empty file", + }); expect(consoleSpy).toHaveBeenCalledWith("Error:", "This suppression file is a empty file"); - expect(mockProcessExit).toHaveBeenCalledWith(1); consoleSpy.mockRestore(); - mockProcessExit.mockRestore(); }); test("test validateSdkSuppressionsFile for undefined file", () => { const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); - const mockProcessExit = vi.spyOn(process, "exit").mockImplementation(() => 
{ - throw new Error("process.exit called"); // Prevent actual exit - }); - expect(() => validateSdkSuppressionsFile(undefined)).toThrow("process.exit called"); - expect(consoleSpy).toHaveBeenCalledWith("Error:", "This suppression file is not a valid yaml. Refer to https://aka.ms/azsdk/sdk-suppression for more information."); - expect(mockProcessExit).toHaveBeenCalledWith(1); + expect(validateSdkSuppressionsFile(undefined)).toEqual({ + message: + "This suppression file is not a valid yaml. Refer to https://aka.ms/azsdk/sdk-suppression for more information.", + result: false, + }); + expect(consoleSpy).toHaveBeenCalledWith( + "Error:", + "This suppression file is not a valid yaml. Refer to https://aka.ms/azsdk/sdk-suppression for more information.", + ); consoleSpy.mockRestore(); - mockProcessExit.mockRestore(); }); test("test validateSdkSuppressionsFile for error structor file", () => { const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); - const mockProcessExit = vi.spyOn(process, "exit").mockImplementation(() => { - throw new Error("process.exit called"); // Prevent actual exit - }); const suppressionContent = { - "suppressions": { - "azure-sdk-for-go": [ - { - "package": "sdk/resourcemanager/appcontainers/armappcontainers" - } - ], - "azure-sdk-for-python": [ - { - "package": "azure-mgmt-appcontainers", - "breaking-changes": [ - "Model BillingMeter no longer has parameter system_data" - ] - } - ] - } + suppressions: { + "azure-sdk-for-go": [ + { + package: "sdk/resourcemanager/appcontainers/armappcontainers", + }, + ], + "azure-sdk-for-python": [ + { + package: "azure-mgmt-appcontainers", + "breaking-changes": ["Model BillingMeter no longer has parameter system_data"], + }, + ], + }, }; - expect(() => validateSdkSuppressionsFile(suppressionContent)).toThrow("process.exit called"); - expect(consoleSpy).toHaveBeenCalledWith("Error:", "This suppression file is a valid yaml but the schema is wrong: data/suppressions/azure-sdk-for-go/0 must have 
required property 'breaking-changes'"); - expect(mockProcessExit).toHaveBeenCalledWith(1); + expect(validateSdkSuppressionsFile(suppressionContent)).toEqual({ + message: + "This suppression file is a valid yaml but the schema is wrong: data/suppressions/azure-sdk-for-go/0 must have required property 'breaking-changes'", + result: false, + }); + expect(consoleSpy).toHaveBeenCalledWith( + "Error:", + "This suppression file is a valid yaml but the schema is wrong: data/suppressions/azure-sdk-for-go/0 must have required property 'breaking-changes'", + ); consoleSpy.mockRestore(); - mockProcessExit.mockRestore(); }); test("test getSdkNamesWithChangedSuppressions", () => { const headCont = { - "suppressions": { - "azure-sdk-for-python": [ - { - "package": "azure-mgmt-appcontainers", - "breaking-changes": [ - "Model BillingMeter no longer has parameter system_data AAA" - ] - } - ], - "azure-sdk-for-go": [ - { - "package": "sdk/resourcemanager/appcontainers/armappcontainers", - "breaking-changes": [ - "Field `EndTime`, `StartTime`, `Status`, `Template` of struct `JobExecution` has been removed" - ] - } - ] - } + suppressions: { + "azure-sdk-for-python": [ + { + package: "azure-mgmt-appcontainers", + "breaking-changes": ["Model BillingMeter no longer has parameter system_data AAA"], + }, + ], + "azure-sdk-for-go": [ + { + package: "sdk/resourcemanager/appcontainers/armappcontainers", + "breaking-changes": [ + "Field `EndTime`, `StartTime`, `Status`, `Template` of struct `JobExecution` has been removed", + ], + }, + ], + }, }; const baseCont = { - "suppressions": { + suppressions: { "azure-sdk-for-python": [ { - "package": "azure-mgmt-appcontainers", - "breaking-changes": [ - "Model BillingMeter no longer has parameter system_data" - ] - } + package: "azure-mgmt-appcontainers", + "breaking-changes": ["Model BillingMeter no longer has parameter system_data"], + }, ], "azure-sdk-for-go": [ { - "package": "sdk/resourcemanager/appcontainers/armappcontainers", + package: 
"sdk/resourcemanager/appcontainers/armappcontainers", "breaking-changes": [ - "Field `EndTime`, `StartTime`, `Status`, `Template` of struct `JobExecution` has been removed" - ] - } - ] - } + "Field `EndTime`, `StartTime`, `Status`, `Template` of struct `JobExecution` has been removed", + ], + }, + ], + }, }; const sdkNames = getSdkNamesWithChangedSuppressions(headCont, baseCont); @@ -165,20 +170,30 @@ test("test processLabels will add new label when has sdkNames", () => { const sdkNames: string[] = ["azure-sdk-for-go", "azure-sdk-for-js"]; const presentLabels: string[] = ["aa", "BreakingChange-Go-Sdk-Suppression"]; const result = processLabels(presentLabels, sdkNames); - expect(result).toEqual({ labelsToAdd: ["BreakingChange-JavaScript-Sdk-Suppression"], labelsToRemove: [] }); - + expect(result).toEqual({ + labelsToAdd: ["BreakingChange-JavaScript-Sdk-Suppression"], + labelsToRemove: [], + }); }); test("test processLabels will remove old label when has the sdkNames not exist", () => { - const sdkNames: string[] = ["azure-sdk-for-js"]; - const presentLabels: string[] = ["aa", "BreakingChange-Go-Sdk-Suppression"]; - const result = processLabels(presentLabels, sdkNames); - expect(result).toEqual({ labelsToAdd: ["BreakingChange-JavaScript-Sdk-Suppression"], labelsToRemove: ["BreakingChange-Go-Sdk-Suppression"] }); - }); + const sdkNames: string[] = ["azure-sdk-for-js"]; + const presentLabels: string[] = ["aa", "BreakingChange-Go-Sdk-Suppression"]; + const result = processLabels(presentLabels, sdkNames); + expect(result).toEqual({ + labelsToAdd: ["BreakingChange-JavaScript-Sdk-Suppression"], + labelsToRemove: ["BreakingChange-Go-Sdk-Suppression"], + }); +}); test("test processLabels will not remove old label when has the sdkNames not exist & has corresponding suppression approved", () => { const sdkNames: string[] = ["azure-sdk-for-go"]; - const presentLabels: string[] = ["aa", "BreakingChange-Go-Sdk-Suppression", "BreakingChange-JavaScript-Sdk-Suppression", 
"BreakingChange-JavaScript-Sdk-Suppression-Approved"]; + const presentLabels: string[] = [ + "aa", + "BreakingChange-Go-Sdk-Suppression", + "BreakingChange-JavaScript-Sdk-Suppression", + "BreakingChange-JavaScript-Sdk-Suppression-Approved", + ]; const result = processLabels(presentLabels, sdkNames); expect(result).toEqual({ labelsToAdd: [], labelsToRemove: [] }); }); diff --git a/eng/tools/sdk-suppressions/tsconfig.json b/eng/tools/sdk-suppressions/tsconfig.json index eae537921c52..5f48d4c6a5b5 100644 --- a/eng/tools/sdk-suppressions/tsconfig.json +++ b/eng/tools/sdk-suppressions/tsconfig.json @@ -1,6 +1,9 @@ { "extends": "../tsconfig.json", "compilerOptions": { - "outDir": "./dist" - } + "outDir": "./dist", + "rootDir": ".", + "allowJs": true, + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], } diff --git a/eng/tools/spec-gen-sdk-runner/eslint.config.js b/eng/tools/spec-gen-sdk-runner/eslint.config.js index cc13ff9fc4a3..e5e61e4a84f1 100644 --- a/eng/tools/spec-gen-sdk-runner/eslint.config.js +++ b/eng/tools/spec-gen-sdk-runner/eslint.config.js @@ -88,7 +88,7 @@ const config = tseslint.config( // https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/prefer-export-from.md "unicorn/prefer-export-from": ["error", { ignoreUsedVariables: true }], }, - } + }, ); export default config; diff --git a/eng/tools/spec-gen-sdk-runner/package.json b/eng/tools/spec-gen-sdk-runner/package.json index 4dfa3e059414..af028550ce48 100644 --- a/eng/tools/spec-gen-sdk-runner/package.json +++ b/eng/tools/spec-gen-sdk-runner/package.json @@ -9,18 +9,27 @@ }, "scripts": { "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . --ignore-path ../.prettierignore --check --log-level debug", "lint": "eslint . -c eslint.config.js --report-unused-disable-directives --max-warnings 0", "lint:fix": "eslint . 
-c eslint.config.js --fix", "test": "vitest run", + "test:update": "vitest --update", "test:ci": "vitest run --coverage --reporter=verbose" }, "engines": { "node": ">=20.0.0" }, "devDependencies": { + "@eslint/js": "^9.21.0", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "eslint": "^9.21.0", + "eslint-plugin-unicorn": "^59.0.0", + "prettier": "~3.5.3", "typescript": "~5.8.2", + "typescript-eslint": "^8.26.0", "vitest": "^3.0.7" } } diff --git a/eng/tools/spec-gen-sdk-runner/src/change-files.ts b/eng/tools/spec-gen-sdk-runner/src/change-files.ts deleted file mode 100644 index 2e666b5885b6..000000000000 --- a/eng/tools/spec-gen-sdk-runner/src/change-files.ts +++ /dev/null @@ -1,184 +0,0 @@ -import path from "node:path"; -import { - getChangedFiles, - searchRelatedParentFolders, - searchSharedLibrary, - searchRelatedTypeSpecProjectBySharedLibrary, - groupPathsByService, - createCombinedSpecs, - type SpecResults, - type ChangedSpecs, - getLastPathSegment, -} from "./utils.js"; -import { logMessage } from "./log.js"; -import { SpecGenSdkCmdInput } from "./types.js"; - -export const readmeMdRegex = /^readme.md$/; -export const typespecProjectRegex = /^tspconfig.yaml$/; -export const typespecProjectSharedLibraryRegex = /[^/]+\.Shared/; - -export function detectChangedSpecConfigFiles(commandInput: SpecGenSdkCmdInput): ChangedSpecs[] { - const prChangedFiles: string[] = getChangedFiles(commandInput.localSpecRepoPath) ?? 
[]; - if (prChangedFiles.length === 0) { - logMessage("No files changed in the PR"); - } - logMessage(`Changed files in the PR: ${prChangedFiles.length}`); - for (const file of prChangedFiles) { - logMessage(`\t${file}`); - } - const fileList = prChangedFiles - .filter((p) => p.startsWith("specification/")) - .filter((p) => !p.includes("/scenarios/")); - - logMessage(`Related readme.md and typespec project list:`); - const changedSpecs: ChangedSpecs[] = []; - - const readmeMDResult = searchRelatedParentFolders(fileList, { - searchFileRegex: readmeMdRegex, - specRepoFolder: commandInput.localSpecRepoPath, - stopAtFolder: "specification", - }); - - const typespecProjectResult = searchRelatedParentFolders(fileList, { - searchFileRegex: typespecProjectRegex, - specRepoFolder: commandInput.localSpecRepoPath, - stopAtFolder: "specification", - }); - - const typespecProjectSharedLibraries = searchSharedLibrary(fileList, { - searchFileRegex: typespecProjectSharedLibraryRegex, - specRepoFolder: commandInput.localSpecRepoPath, - }); - - const typespecProjectResultSearchedBySharedLibrary = searchRelatedTypeSpecProjectBySharedLibrary( - typespecProjectSharedLibraries, - { - searchFileRegex: typespecProjectRegex, - specRepoFolder: commandInput.localSpecRepoPath, - }, - ); - - // Merge typespec project results - for (const folderPath of Object.keys(typespecProjectResultSearchedBySharedLibrary)) { - if (typespecProjectResult[folderPath]) { - typespecProjectResult[folderPath] = [ - ...typespecProjectResult[folderPath], - ...typespecProjectResultSearchedBySharedLibrary[folderPath], - ]; - } else { - typespecProjectResult[folderPath] = typespecProjectResultSearchedBySharedLibrary[folderPath]; - } - } - - // Group paths by service - const serviceMap = groupPathsByService(readmeMDResult, typespecProjectResult); - - const results: SpecResults = { readmeMDResult, typespecProjectResult }; - - // Process each service - for (const [, info] of serviceMap) { - // Case: Resource Manager with 
.Management - if (info.managementPaths.length > 0) { - if (info.resourceManagerPaths.length === 1) { - // Single resource-manager path - match with all Management paths - const newSpecs = createCombinedSpecs( - info.resourceManagerPaths[0].path, - info.managementPaths, - results, - ); - changedSpecs.push(...newSpecs); - logMessage( - `\t readme folders: ${info.resourceManagerPaths[0].path}, tspconfig folders: ${info.managementPaths}`, - ); - for (const p of info.managementPaths) { - delete typespecProjectResult[p]; - } - delete readmeMDResult[info.resourceManagerPaths[0].path]; - } else { - // Multiple resource-manager paths - match by subfolder name - for (const rmPath of info.resourceManagerPaths) { - const matchingManagements = info.managementPaths.filter((mPath) => { - const rmSubPath = rmPath.subPath; - const managementName = getLastPathSegment(mPath).replace(".Management", ""); - return rmSubPath && rmSubPath === managementName; - }); - if (matchingManagements.length > 0) { - const newSpecs = createCombinedSpecs(rmPath.path, matchingManagements, results); - changedSpecs.push(...newSpecs); - logMessage( - `\t readme folders: ${rmPath.path}, tspconfig folders: ${matchingManagements}`, - ); - for (const p of matchingManagements) { - delete typespecProjectResult[p]; - } - delete readmeMDResult[rmPath.path]; - } - } - } - } - - // Case: Data Plane matching - if (info.dataPlanePaths.length > 0 && info.otherTypeSpecPaths.length > 0) { - if (info.dataPlanePaths.length === 1) { - // Single data-plane path - match with all non-Management TypeSpec paths - const newSpecs = createCombinedSpecs( - info.dataPlanePaths[0].path, - info.otherTypeSpecPaths, - results, - ); - changedSpecs.push(...newSpecs); - logMessage( - `\t readme folders: ${info.dataPlanePaths[0].path}, tspconfig folders: ${info.otherTypeSpecPaths}`, - ); - for (const p of info.otherTypeSpecPaths) { - delete typespecProjectResult[p]; - } - delete readmeMDResult[info.dataPlanePaths[0].path]; - } else { - // 
Multiple data-plane paths - match by subfolder name - for (const dpPath of info.dataPlanePaths) { - const matchingTypeSpecs = info.otherTypeSpecPaths.filter((tsPath) => { - const dpSubFolder = dpPath.subFolder; - const tsLastSegment = getLastPathSegment(tsPath); - return dpSubFolder && dpSubFolder === tsLastSegment; - }); - if (matchingTypeSpecs.length > 0) { - const newSpecs = createCombinedSpecs(dpPath.path, matchingTypeSpecs, results); - changedSpecs.push(...newSpecs); - logMessage( - `\t readme folders: ${dpPath.path}, tspconfig folders: ${matchingTypeSpecs}`, - ); - for (const p of matchingTypeSpecs) { - delete typespecProjectResult[p]; - } - delete readmeMDResult[dpPath.path]; - } - } - } - } - } - - // Process remaining unmatched paths - for (const folderPath of new Set([ - ...Object.keys(readmeMDResult), - ...Object.keys(typespecProjectResult), - ])) { - const cs: ChangedSpecs = { - specs: [], - }; - - if (typespecProjectResult[folderPath]) { - cs.specs = typespecProjectResult[folderPath]; - cs.typespecProject = path.join(folderPath, "tspconfig.yaml"); - logMessage(`\t tspconfig: ${cs.typespecProject}`); - } else { - cs.readmeMd = path.join(folderPath, "readme.md"); - cs.specs = readmeMDResult[folderPath]; - logMessage(`\t readme: ${cs.readmeMd}`); - } - - changedSpecs.push(cs); - } - - return changedSpecs; -} diff --git a/eng/tools/spec-gen-sdk-runner/src/command-helpers.ts b/eng/tools/spec-gen-sdk-runner/src/command-helpers.ts new file mode 100644 index 000000000000..e9b7ee06cc5e --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/src/command-helpers.ts @@ -0,0 +1,373 @@ +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import { + findReadmeFiles, + getArgumentValue, + getAllTypeSpecPaths, + objectToMap, + SpecConfigs, +} from "./utils.js"; +import { LogIssueType, LogLevel, logMessage, setVsoVariable, vsoLogIssue } from "./log.js"; +import { + APIViewRequestData, + SdkName, + SpecGenSdkArtifactInfo, + 
SpecGenSdkCmdInput, + SpecGenSdkRequiredSettings, + VsoLogs, +} from "./types.js"; +import { groupSpecConfigPaths } from "./spec-helpers.js"; + +/** + * Load execution-report.json. + * @param commandInput - The command input. + * @returns the execution report JSON + */ +export function getExecutionReport(commandInput: SpecGenSdkCmdInput): any { + // Read the execution report to determine if the generation was successful + const executionReportPath = path.join( + commandInput.workingFolder, + `${commandInput.sdkRepoName}_tmp/execution-report.json`, + ); + return JSON.parse(fs.readFileSync(executionReportPath, "utf8")); +} + +/** + * Set the pipeline variables for the SDK pull request. + * @param stagedArtifactsFolder - The staged artifacts folder. + * @param skipPrVariables - A flag indicating whether to skip setting PR variables. + * @param packageName - The package name. + * @param installationInstructions - The installation instructions. + */ +export function setPipelineVariables( + stagedArtifactsFolder: string, + skipPrVariables: boolean = true, + packageName: string = "", + installationInstructions: string = "", +): void { + if (!skipPrVariables) { + const branchName = `sdkauto/${packageName?.replace("/", "-")}`; + const prTitle = `[AutoPR ${packageName}]`; + const prBody = installationInstructions; + setVsoVariable("PrBranch", branchName); + setVsoVariable("PrTitle", prTitle); + setVsoVariable("PrBody", prBody); + } + setVsoVariable("StagedArtifactsFolder", stagedArtifactsFolder); +} + +/** + * Parse the arguments. + * @returns The spec-gen-sdk command input. 
+ */ +export function parseArguments(): SpecGenSdkCmdInput { + const __filename: string = fileURLToPath(import.meta.url); + const __dirname: string = path.dirname(__filename); + + // Get the arguments passed to the script + const args: string[] = process.argv.slice(2); + const localSpecRepoPath: string = path.resolve( + getArgumentValue(args, "--scp", path.join(__dirname, "..", "..")), + ); + const sdkRepoName: string = getArgumentValue(args, "--lang", "azure-sdk-for-net"); + const localSdkRepoPath: string = path.resolve( + getArgumentValue(args, "--sdp", path.join(localSpecRepoPath, "..", sdkRepoName)), + ); + const workingFolder: string = path.resolve( + getArgumentValue(args, "--wf", path.join(localSpecRepoPath, "..")), + ); + + // Set runMode to "release" by default + let runMode = "release"; + const batchType: string = getArgumentValue(args, "--batch-type", ""); + const pullRequestNumber: string = getArgumentValue(args, "--pr-number", ""); + if (batchType) { + runMode = "batch"; + } else if (pullRequestNumber) { + runMode = "spec-pull-request"; + } + + return { + workingFolder, + localSpecRepoPath, + localSdkRepoPath, + sdkRepoName, + sdkLanguage: sdkRepoName.replace("-pr", ""), + runMode, + tspConfigPath: getArgumentValue(args, "--tsp-config-relative-path", ""), + readmePath: getArgumentValue(args, "--readme-relative-path", ""), + prNumber: getArgumentValue(args, "--pr-number", ""), + apiVersion: getArgumentValue(args, "--api-version", ""), + sdkReleaseType: getArgumentValue(args, "--sdk-release-type", ""), + specCommitSha: getArgumentValue(args, "--commit", "HEAD"), + specRepoHttpsUrl: getArgumentValue(args, "--spec-repo-url", ""), + headRepoHttpsUrl: getArgumentValue(args, "--head-repo-url", ""), + headBranch: getArgumentValue(args, "--head-branch", ""), + }; +} + +/** + * Prepare the spec-gen-sdk command. + * @param commandInput The command input. + * @returns The spec-gen-sdk command. 
+ */ +export function prepareSpecGenSdkCommand(commandInput: SpecGenSdkCmdInput): string[] { + const specGenSdkCommand = []; + specGenSdkCommand.push( + "spec-gen-sdk", + "--scp", + commandInput.localSpecRepoPath, + "--sdp", + commandInput.localSdkRepoPath, + "--wf", + commandInput.workingFolder, + "-l", + commandInput.sdkRepoName, + "-c", + commandInput.specCommitSha, + "--rm", + commandInput.runMode, + ); + if (commandInput.specRepoHttpsUrl) { + specGenSdkCommand.push("--spec-repo-https-url", commandInput.specRepoHttpsUrl); + } + if (commandInput.prNumber) { + specGenSdkCommand.push("--pr-number", commandInput.prNumber); + } + if (commandInput.tspConfigPath) { + specGenSdkCommand.push("--tsp-config-relative-path", commandInput.tspConfigPath); + } + if (commandInput.readmePath) { + specGenSdkCommand.push("--readme-relative-path", commandInput.readmePath); + } + if (commandInput.headRepoHttpsUrl) { + specGenSdkCommand.push("--head-repo-url", commandInput.headRepoHttpsUrl); + } + if (commandInput.headBranch) { + specGenSdkCommand.push("--head-branch", commandInput.headBranch); + } + if (commandInput.apiVersion) { + specGenSdkCommand.push("--api-version", commandInput.apiVersion); + } + if (commandInput.sdkReleaseType) { + specGenSdkCommand.push("--sdk-release-type", commandInput.sdkReleaseType); + } + return specGenSdkCommand; +} + +/** + * Get the spec paths based on the batch run type. + * @param batchType The batch run type. + * @param specRepoPath The specification repository path. + * @returns The specConfigs array. 
+ */ +export function getSpecPaths(batchType: string, specRepoPath: string): SpecConfigs[] { + let tspconfigs: string[] = []; + let readmes: string[] = []; + let skipUnmatchedReadmes = false; + switch (batchType) { + case "all-specs": { + tspconfigs = getAllTypeSpecPaths(specRepoPath); + readmes = findReadmeFiles(path.join(specRepoPath, "specification")); + break; + } + case "all-typespecs": { + tspconfigs = getAllTypeSpecPaths(specRepoPath); + readmes = findReadmeFiles(path.join(specRepoPath, "specification")); + skipUnmatchedReadmes = true; + break; + } + case "all-mgmtplane-typespecs": { + tspconfigs = getAllTypeSpecPaths(specRepoPath).filter((p) => p.includes(".Management")); + readmes = findReadmeFiles(path.join(specRepoPath, "specification")).filter((p) => + p.includes("resource-manager"), + ); + skipUnmatchedReadmes = true; + break; + } + case "all-dataplane-typespecs": { + tspconfigs = getAllTypeSpecPaths(specRepoPath).filter((p) => !p.includes(".Management")); + readmes = findReadmeFiles(path.join(specRepoPath, "specification")).filter((p) => + p.includes("data-plane"), + ); + skipUnmatchedReadmes = true; + break; + } + case "all-openapis": { + readmes = findReadmeFiles(path.join(specRepoPath, "specification")); + break; + } + case "all-mgmtplane-openapis": { + readmes = findReadmeFiles(path.join(specRepoPath, "specification")).filter((p) => + p.includes("resource-manager"), + ); + break; + } + case "all-dataplane-openapis": { + readmes = findReadmeFiles(path.join(specRepoPath, "specification")).filter((p) => + p.includes("data-plane"), + ); + break; + } + case "sample-typespecs": { + tspconfigs = [ + "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + ]; + } + } + + return groupSpecConfigPaths(tspconfigs, readmes, skipUnmatchedReadmes); +} + +/** + * Logs issues to Azure DevOps Pipeline * + * @param logPath - The vso log file path. 
+ * @param specConfigDisplayText - The display text for the spec configuration. + */ +export function logIssuesToPipeline(logPath: string, specConfigDisplayText: string): void { + let vsoLogs: VsoLogs; + try { + const logContent = JSON.parse(fs.readFileSync(logPath, "utf8")); + vsoLogs = objectToMap(logContent); + } catch (error) { + throw new Error(`Runner: error reading log at ${logPath}:${error}`); + } + + if (vsoLogs) { + const errors = [...vsoLogs.values()].flatMap((entry) => entry.errors ?? []); + const warnings = [...vsoLogs.values()].flatMap((entry) => entry.warnings ?? []); + if (errors.length > 0) { + const errorTitle = + `Errors occurred while generating SDK from ${specConfigDisplayText}. ` + + `Follow the steps at https://aka.ms/azsdk/sdk-automation-faq#how-to-view-the-detailed-sdk-generation-errors to view detailed errors.`; + logMessage(errorTitle, LogLevel.Group); + const errorsWithTitle = [errorTitle, ...errors]; + vsoLogIssue(errorsWithTitle.join("%0D%0A")); + logMessage("ending group logging", LogLevel.EndGroup); + } + if (warnings.length > 0) { + const warningTitle = + `Warnings occurred while generating SDK from ${specConfigDisplayText}. ` + + `Follow the steps at https://aka.ms/azsdk/sdk-automation-faq#how-to-view-the-detailed-sdk-generation-errors to view detailed warnings.`; + logMessage(warningTitle, LogLevel.Group); + const warningsWithTitle = [warningTitle, ...warnings]; + vsoLogIssue(warningsWithTitle.join("%0D%0A"), LogIssueType.Warning); + logMessage("ending group logging", LogLevel.EndGroup); + } + } +} + +/** + * Process the breaking change label artifacts. + * + * @param executionReport - The spec-gen-sdk execution report. + * @returns flag of lable breaking change. + */ +export function getBreakingChangeInfo(executionReport: any): boolean { + for (const packageInfo of executionReport.packages) { + if (packageInfo.shouldLabelBreakingChange) { + return true; + } + } + return false; +} + +/** + * Generate the spec-gen-sdk artifacts. 
+ * @param commandInput - The command input. + * @param result - The spec-gen-sdk execution result. + * @param hasBreakingChange - A flag indicating whether there are breaking changes. + * @param hasManagementPlaneSpecs - A flag indicating whether there are management plane specs. + * @param stagedArtifactsFolder - The staged artifacts folder. + * @param apiViewRequestData - The API view request data. + * @param sdkGenerationExecuted - A flag indicating whether the SDK generation was executed. + * @returns the run status code. + */ +export function generateArtifact( + commandInput: SpecGenSdkCmdInput, + result: string, + hasBreakingChange: boolean, + hasManagementPlaneSpecs: boolean, + stagedArtifactsFolder: string, + apiViewRequestData: APIViewRequestData[], + sdkGenerationExecuted: boolean = true, +): number { + const specGenSdkArtifactName = "spec-gen-sdk-artifact"; + const specGenSdkArtifactFileName = specGenSdkArtifactName + ".json"; + const specGenSdkArtifactPath = "out/spec-gen-sdk-artifact"; + const specGenSdkArtifactAbsoluteFolder = path.join( + commandInput.workingFolder, + specGenSdkArtifactPath, + ); + try { + if (!fs.existsSync(specGenSdkArtifactAbsoluteFolder)) { + fs.mkdirSync(specGenSdkArtifactAbsoluteFolder, { recursive: true }); + } + let isSpecGenSdkCheckRequired = false; + if (sdkGenerationExecuted) { + isSpecGenSdkCheckRequired = getRequiredSettingValue( + hasManagementPlaneSpecs, + commandInput.sdkLanguage as SdkName, + ); + } + + // Write artifact + const artifactInfo: SpecGenSdkArtifactInfo = { + language: commandInput.sdkLanguage, + result, + prNumber: commandInput.prNumber, + labelAction: hasBreakingChange, + isSpecGenSdkCheckRequired, + apiViewRequestData: apiViewRequestData, + }; + fs.writeFileSync( + path.join(commandInput.workingFolder, specGenSdkArtifactPath, specGenSdkArtifactFileName), + JSON.stringify(artifactInfo, undefined, 2), + ); + setVsoVariable("SpecGenSdkArtifactName", specGenSdkArtifactName); + 
setVsoVariable("SpecGenSdkArtifactPath", specGenSdkArtifactPath); + setVsoVariable("StagedArtifactsFolder", stagedArtifactsFolder); + setVsoVariable("HasAPIViewArtifact", apiViewRequestData.length > 0 ? "true" : "false"); + } catch (error) { + logMessage("Runner: errors occurred while processing breaking change", LogLevel.Group); + vsoLogIssue(`Runner: errors writing breaking change label artifacts:${error}`); + logMessage("ending group logging", LogLevel.EndGroup); + return 1; + } + return 0; +} + +/** + * Get the service folder path from the spec config path. + * @param specConfigPath + * @returns The service folder path. + */ +export function getServiceFolderPath(specConfigPath: string): string { + if (!specConfigPath || specConfigPath.length === 0) { + return ""; + } + const segments = specConfigPath.split("/"); + if (segments.length > 2) { + return `${segments[0]}/${segments[1]}`; + } + return specConfigPath; +} + +/** + * Get the required setting value for the SDK check based on the spec PR types. + * @param hasManagementPlaneSpecs - A flag indicating whether there are management plane specs. + * @param sdkName - The SDK name. + * @returns boolean indicating whether the SDK check is required. 
+ */ +export function getRequiredSettingValue( + hasManagementPlaneSpecs: boolean, + sdkName: SdkName, +): boolean { + if (hasManagementPlaneSpecs) { + return SpecGenSdkRequiredSettings[sdkName].managementPlane; + } else { + return SpecGenSdkRequiredSettings[sdkName].dataPlane; + } +} diff --git a/eng/tools/spec-gen-sdk-runner/src/commands.ts b/eng/tools/spec-gen-sdk-runner/src/commands.ts index d7336a9ba39a..5bcb0a27df60 100644 --- a/eng/tools/spec-gen-sdk-runner/src/commands.ts +++ b/eng/tools/spec-gen-sdk-runner/src/commands.ts @@ -1,24 +1,20 @@ import fs from "node:fs"; import path from "node:path"; -import { fileURLToPath } from "node:url"; +import { runSpecGenSdkCommand, resetGitRepo, SpecConfigs } from "./utils.js"; +import { LogLevel, logMessage, vsoAddAttachment, vsoLogIssue } from "./log.js"; +import { APIViewRequestData, SpecGenSdkCmdInput } from "./types.js"; +import { detectChangedSpecConfigFiles } from "./spec-helpers.js"; import { - findReadmeFiles, - getArgumentValue, - runSpecGenSdkCommand, - getAllTypeSpecPaths, - resetGitRepo, - objectToMap, -} from "./utils.js"; -import { - LogIssueType, - LogLevel, - logMessage, - setVsoVariable, - vsoAddAttachment, - vsoLogIssue, -} from "./log.js"; -import { SpecGenSdkCmdInput, VsoLogs } from "./types.js"; -import { detectChangedSpecConfigFiles } from "./change-files.js"; + generateArtifact, + getBreakingChangeInfo, + getExecutionReport, + getServiceFolderPath, + getSpecPaths, + logIssuesToPipeline, + parseArguments, + prepareSpecGenSdkCommand, + setPipelineVariables, +} from "./command-helpers.js"; /** * Generate SDK for a single spec. 
@@ -63,11 +59,16 @@ export async function generateSdkForSingleSpec(): Promise { "missing-package-name"; packageName = packageName.replace("/", "-"); const installationInstructions = executionReport.packages[0]?.installationInstructions; - setPipelineVariables(packageName, installationInstructions); + setPipelineVariables( + executionReport.stagedArtifactsFolder, + false, + packageName, + installationInstructions, + ); } logMessage("ending group logging", LogLevel.EndGroup); - logIssuesToPipeline(executionReport.vsoLogPath, specConfigPathText); + logIssuesToPipeline(executionReport?.vsoLogPath, specConfigPathText); return statusCode; } @@ -83,10 +84,22 @@ export async function generateSdkForSpecPr(): Promise { let statusCode = 0; let pushedSpecConfigCount; - let shouldLabelBreakingChange = false; - let breakingChangeLabel = ""; let executionReport; let changedSpecPathText = ""; + let hasManagementPlaneSpecs = false; + let overallRunHasBreakingChange = false; + let currentRunHasBreakingChange = false; + let sdkGenerationExecuted = true; + let overallExecutionResult = ""; + let currentExecutionResult = ""; + let stagedArtifactsFolder = ""; + const apiViewRequestData: APIViewRequestData[] = []; + + if (changedSpecs.length === 0) { + sdkGenerationExecuted = false; + overallExecutionResult = "succeeded"; + } + for (const changedSpec of changedSpecs) { if (!changedSpec.typespecProject && !changedSpec.readmeMd) { logMessage("Runner: no spec config file found in the changed files", LogLevel.Warn); @@ -98,11 +111,21 @@ export async function generateSdkForSpecPr(): Promise { specGenSdkCommand.push("--tsp-config-relative-path", changedSpec.typespecProject); changedSpecPathText = changedSpec.typespecProject; pushedSpecConfigCount++; + if (changedSpec.typespecProject.includes(".Management")) { + hasManagementPlaneSpecs = true; + } } if (changedSpec.readmeMd) { specGenSdkCommand.push("--readme-relative-path", changedSpec.readmeMd); - changedSpecPathText = changedSpecPathText + " 
" + changedSpec.readmeMd; + changedSpecPathText = changedSpec.readmeMd; pushedSpecConfigCount++; + if (pushedSpecConfigCount === 2) { + // If both readme and tspconfig are provided, we need to use the service folder path for the log message + changedSpecPathText = getServiceFolderPath(changedSpec.readmeMd); + } + if (changedSpec.readmeMd.includes("resource-manager")) { + hasManagementPlaneSpecs = true; + } } logMessage(`Generating SDK from ${changedSpecPathText}`, LogLevel.Group); logMessage(`Runner command:${specGenSdkCommand.join(" ")}`); @@ -121,24 +144,46 @@ export async function generateSdkForSpecPr(): Promise { } try { - // Read the execution report to determine if the generation was successful + // Read the execution report to aggreate the generation results executionReport = getExecutionReport(commandInput); - const executionResult = executionReport.executionResult; - [shouldLabelBreakingChange, breakingChangeLabel] = getBreakingChangeInfo(executionReport); - logMessage(`Runner command execution result:${executionResult}`); + currentExecutionResult = executionReport.executionResult; + + if (executionReport.stagedArtifactsFolder) { + stagedArtifactsFolder = executionReport.stagedArtifactsFolder; + for (const pkg of executionReport.packages) { + if (pkg.apiViewArtifact) { + apiViewRequestData.push({ + packageName: pkg.packageName, + filePath: path.relative(stagedArtifactsFolder, pkg.apiViewArtifact), + }); + } + } + } + + if (overallExecutionResult !== "failed") { + overallExecutionResult = currentExecutionResult; + } + currentRunHasBreakingChange = getBreakingChangeInfo(executionReport); + overallRunHasBreakingChange = overallRunHasBreakingChange || currentRunHasBreakingChange; + logMessage(`Runner command execution result:${currentExecutionResult}`); } catch (error) { logMessage(`Runner: error reading execution-report.json:${error}`, LogLevel.Error); statusCode = 1; + overallExecutionResult = "failed"; } logMessage("ending group logging", 
LogLevel.EndGroup); - logIssuesToPipeline(executionReport.vsoLogPath, changedSpecPathText); + logIssuesToPipeline(executionReport?.vsoLogPath, changedSpecPathText); } - // Process the breaking change label artifacts + // Process the spec-gen-sdk artifacts statusCode = - processBreakingChangeLabelArtifacts( + generateArtifact( commandInput, - shouldLabelBreakingChange, - breakingChangeLabel, + overallExecutionResult, + overallRunHasBreakingChange, + hasManagementPlaneSpecs, + stagedArtifactsFolder, + apiViewRequestData, + sdkGenerationExecuted, ) || statusCode; return statusCode; } @@ -146,34 +191,65 @@ export async function generateSdkForSpecPr(): Promise { /** * Generate SDKs for batch specs. */ -export async function generateSdkForBatchSpecs(runMode: string): Promise { +export async function generateSdkForBatchSpecs(batchType: string): Promise { // Parse the arguments const commandInput: SpecGenSdkCmdInput = parseArguments(); // Construct the spec-gen-sdk command const specGenSdkCommand = prepareSpecGenSdkCommand(commandInput); - // Get the spec paths based on the run mode - const specConfigPaths = getSpecPaths(runMode, commandInput.localSpecRepoPath); + if ( + batchType === "all-typespecs" || + batchType === "all-mgmtplane-typespecs" || + batchType === "all-dataplane-typespecs" + ) { + specGenSdkCommand.push("--skip-sdk-gen-from-openapi", "true"); + } + + // Get the spec paths based on the batch run type + const specConfigsArray: SpecConfigs[] = getSpecPaths(batchType, commandInput.localSpecRepoPath); // Prepare variables let statusCode = 0; + let pushedSpecConfigCount; let markdownContent = "\n"; + markdownContent += `## Batch Run Type\n ${batchType}\n`; let failedContent = `## Spec Failures in the Generation Process\n`; let succeededContent = `## Successful Specs in the Generation Process\n`; let notEnabledContent = `## Specs with SDK Not Enabled\n`; + let duplicatedConfigContent = `## Specs with Duplicated SDK Configurations (in 'tspconfig.yaml' and 
'readme.md')\n`; let failedCount = 0; let notEnabledCount = 0; + let duplicatedConfigCount = 0; let succeededCount = 0; let executionReport; + let specConfigPath = ""; + let stagedArtifactsFolder = ""; + let serviceFolderPath = ""; // Generate SDKs for each spec - for (const specConfigPath of specConfigPaths) { - logMessage(`Generating SDK from ${specConfigPath}`, LogLevel.Group); + for (const specConfigs of specConfigsArray) { + if (specConfigs.tspconfigPath && specConfigs.readmePath) { + serviceFolderPath = getServiceFolderPath(specConfigs.tspconfigPath); + logMessage(`Generating SDK from ${serviceFolderPath}`, LogLevel.Group); + } else if (specConfigs.tspconfigPath) { + logMessage(`Generating SDK from ${specConfigs.tspconfigPath}`, LogLevel.Group); + } else if (specConfigs.readmePath) { + logMessage(`Generating SDK from ${specConfigs.readmePath}`, LogLevel.Group); + } + pushedSpecConfigCount = 0; + if (specConfigs.readmePath) { + specConfigPath = specConfigs.readmePath; + specGenSdkCommand.push("--readme-relative-path", specConfigs.readmePath); + pushedSpecConfigCount++; + } - if (specConfigPath.endsWith("tspconfig.yaml")) { - specGenSdkCommand.push("--tsp-config-relative-path", specConfigPath); - } else { - specGenSdkCommand.push("--readme-relative-path", specConfigPath); + if (specConfigs.tspconfigPath) { + // Override specConfigPath variable for reporting purposes + // as we only input both tspconfig and readme while selecting typespec options for batch runs + specConfigPath = specConfigs.tspconfigPath; + specGenSdkCommand.push("--tsp-config-relative-path", specConfigs.tspconfigPath); + pushedSpecConfigCount++; } + logMessage(`Runner command:${specGenSdkCommand.join(" ")}`); try { await resetGitRepo(commandInput.localSdkRepoPath); @@ -184,14 +260,18 @@ export async function generateSdkForBatchSpecs(runMode: string): Promise statusCode = 1; } - // Pop the spec config path from the command - specGenSdkCommand.pop(); - specGenSdkCommand.pop(); + // Pop the spec 
config path from specGenSdkCommand + for (let index = 0; index < pushedSpecConfigCount * 2; index++) { + specGenSdkCommand.pop(); + } try { // Read the execution report to determine if the generation was successful executionReport = getExecutionReport(commandInput); const executionResult = executionReport.executionResult; + if (executionReport.stagedArtifactsFolder) { + stagedArtifactsFolder = executionReport.stagedArtifactsFolder; + } logMessage(`Runner: command execution result:${executionResult}`); if (executionResult === "succeeded" || executionResult === "warning") { @@ -204,12 +284,21 @@ export async function generateSdkForBatchSpecs(runMode: string): Promise failedContent += `${specConfigPath},`; failedCount++; } + // Check for duplicated SDK configurations, + // the execution result can be "succeeded" or "warning" + if (executionReport.isSdkConfigDuplicated) { + duplicatedConfigContent += `${specConfigPath},`; + duplicatedConfigCount++; + } } catch (error) { logMessage(`Runner: error reading execution-report.json:${error}`, LogLevel.Error); statusCode = 1; } logMessage("ending group logging", LogLevel.EndGroup); - logIssuesToPipeline(executionReport.vsoLogPath, specConfigPath); + if (specConfigs.tspconfigPath && specConfigs.readmePath) { + specConfigPath = serviceFolderPath; + } + logIssuesToPipeline(executionReport?.vsoLogPath, specConfigPath); } if (failedCount > 0) { markdownContent += `${failedContent}\n`; @@ -217,6 +306,9 @@ export async function generateSdkForBatchSpecs(runMode: string): Promise if (notEnabledCount > 0) { markdownContent += `${notEnabledContent}\n`; } + if (duplicatedConfigCount > 0) { + markdownContent += `${duplicatedConfigContent}\n`; + } if (succeededCount > 0) { markdownContent += `${succeededContent}\n`; } @@ -224,8 +316,11 @@ export async function generateSdkForBatchSpecs(runMode: string): Promise markdownContent += notEnabledCount ? 
`## Total Specs with SDK not enabled in the Configuration\n ${notEnabledCount}\n` : ""; + markdownContent += duplicatedConfigCount + ? `## Total Specs with Duplicated SDK Configurations\n ${duplicatedConfigCount}\n` + : ""; markdownContent += succeededCount ? `## Total Successful Specs\n ${succeededCount}\n` : ""; - markdownContent += `## Total Specs Count\n ${specConfigPaths.length}\n\n`; + markdownContent += `## Total Specs Count\n ${specConfigsArray.length}\n\n`; // Write the markdown content to a file const markdownFilePath = path.join(commandInput.workingFolder, "out/logs/generation-summary.md"); @@ -240,251 +335,8 @@ export async function generateSdkForBatchSpecs(runMode: string): Promise vsoLogIssue(`Runner: error writing markdown file ${markdownFilePath}:${error}`); statusCode = 1; } - return statusCode; -} - -/** - * Load execution-report.json. - * @param commandInput - The command input. - * @returns the execution report JSON - */ -function getExecutionReport(commandInput: SpecGenSdkCmdInput): any { - // Read the execution report to determine if the generation was successful - const executionReportPath = path.join( - commandInput.workingFolder, - `${commandInput.sdkRepoName}_tmp/execution-report.json`, - ); - return JSON.parse(fs.readFileSync(executionReportPath, "utf8")); -} - -/** - * Set the pipeline variables for the SDK pull request. - * @param packageName - The package name. - * @param installationInstructions - The installation instructions. - */ -function setPipelineVariables(packageName: string, installationInstructions: string = ""): void { - const branchName = `sdkauto/${packageName?.replace("/", "_")}`; - const prTitle = `[AutoPR ${packageName}]`; - const prBody = installationInstructions; - setVsoVariable("PrBranch", branchName); - setVsoVariable("PrTitle", prTitle); - setVsoVariable("PrBody", prBody); -} -/** - * Parse the arguments. - * @returns The spec-gen-sdk command input. 
- */ -function parseArguments(): SpecGenSdkCmdInput { - const __filename: string = fileURLToPath(import.meta.url); - const __dirname: string = path.dirname(__filename); - - // Get the arguments passed to the script - const args: string[] = process.argv.slice(2); - const localSpecRepoPath: string = path.resolve( - getArgumentValue(args, "--scp", path.join(__dirname, "..", "..")), - ); - const sdkRepoName: string = getArgumentValue(args, "--lang", "azure-sdk-for-net"); - const localSdkRepoPath: string = path.resolve( - getArgumentValue(args, "--sdp", path.join(localSpecRepoPath, "..", sdkRepoName)), - ); - const workingFolder: string = path.resolve( - getArgumentValue(args, "--wf", path.join(localSpecRepoPath, "..")), - ); - return { - workingFolder, - localSpecRepoPath, - localSdkRepoPath, - sdkRepoName, - isTriggeredByPipeline: getArgumentValue(args, "--tr", "false"), - tspConfigPath: getArgumentValue(args, "--tsp-config-relative-path", ""), - readmePath: getArgumentValue(args, "--readme-relative-path", ""), - prNumber: getArgumentValue(args, "--pr-number", ""), - apiVersion: getArgumentValue(args, "--api-version", ""), - sdkReleaseType: getArgumentValue(args, "--sdk-release-type", ""), - specCommitSha: getArgumentValue(args, "--commit", "HEAD"), - specRepoHttpsUrl: getArgumentValue(args, "--spec-repo-url", ""), - headRepoHttpsUrl: getArgumentValue(args, "--head-repo-url", ""), - headBranch: getArgumentValue(args, "--head-branch", ""), - }; -} - -/** - * Prepare the spec-gen-sdk command. - * @param commandInput The command input. - * @returns The spec-gen-sdk command. 
- */ -function prepareSpecGenSdkCommand(commandInput: SpecGenSdkCmdInput): string[] { - const specGenSdkCommand = []; - specGenSdkCommand.push( - "spec-gen-sdk", - "--scp", - commandInput.localSpecRepoPath, - "--sdp", - commandInput.localSdkRepoPath, - "--wf", - commandInput.workingFolder, - "-l", - commandInput.sdkRepoName, - "-c", - commandInput.specCommitSha, - "-t", - commandInput.isTriggeredByPipeline, - ); - if (commandInput.specRepoHttpsUrl) { - specGenSdkCommand.push("--spec-repo-url", commandInput.specRepoHttpsUrl); - } - if (commandInput.prNumber) { - specGenSdkCommand.push("--pr-number", commandInput.prNumber); - } - if (commandInput.tspConfigPath) { - specGenSdkCommand.push("--tsp-config-relative-path", commandInput.tspConfigPath); - } - if (commandInput.readmePath) { - specGenSdkCommand.push("--readme-relative-path", commandInput.readmePath); - } - if (commandInput.headRepoHttpsUrl) { - specGenSdkCommand.push("--head-repo-url", commandInput.headRepoHttpsUrl); - } - if (commandInput.headBranch) { - specGenSdkCommand.push("--head-branch", commandInput.headBranch); - } - if (commandInput.apiVersion) { - specGenSdkCommand.push("--api-version", commandInput.apiVersion); - } - if (commandInput.sdkReleaseType) { - specGenSdkCommand.push("--sdk-release-type", commandInput.sdkReleaseType); - } - return specGenSdkCommand; -} - -/** - * Get the spec paths based on the run mode. - * @param runMode The run mode. - * @param specRepoPath The specification repository path. - * @returns The spec paths. 
- */ -function getSpecPaths(runMode: string, specRepoPath: string): string[] { - const specConfigPaths: string[] = []; - switch (runMode) { - case "all-specs": { - specConfigPaths.push( - ...getAllTypeSpecPaths(specRepoPath), - ...findReadmeFiles(path.join(specRepoPath, "specification")), - ); - break; - } - case "all-typespecs": { - specConfigPaths.push(...getAllTypeSpecPaths(specRepoPath)); - break; - } - case "all-openapis": { - specConfigPaths.push(...findReadmeFiles(path.join(specRepoPath, "specification"))); - break; - } - case "sample-typespecs": { - specConfigPaths.push( - "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", - "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", - ); - } - } - return specConfigPaths; -} + // Set the pipeline variables for artifacts location + setPipelineVariables(stagedArtifactsFolder); -/** - * Logs issues to Azure DevOps Pipeline * - * @param logPath - The vso log file path. - * @param specConfigDisplayText - The display text for the spec configuration. - */ -function logIssuesToPipeline(logPath: string, specConfigDisplayText: string): void { - let vsoLogs: VsoLogs; - try { - const logContent = JSON.parse(fs.readFileSync(logPath, "utf8")); - vsoLogs = objectToMap(logContent); - } catch (error) { - throw new Error(`Runner: error reading log at ${logPath}:${error}`); - } - - if (vsoLogs) { - const errors = [...vsoLogs.values()].flatMap((entry) => entry.errors ?? []); - const warnings = [...vsoLogs.values()].flatMap((entry) => entry.warnings ?? 
[]); - if (errors.length > 0) { - const errorTitle = `Errors occurred while generating SDK from ${specConfigDisplayText}`; - logMessage(errorTitle, LogLevel.Group); - const errorsWithTitle = [errorTitle, ...errors]; - vsoLogIssue(errorsWithTitle.join("%0D%0A")); - logMessage("ending group logging", LogLevel.EndGroup); - } - if (warnings.length > 0) { - const warningTitle = `Warnings occurred while generating SDK from ${specConfigDisplayText}`; - logMessage(warningTitle, LogLevel.Group); - const warningsWithTitle = [warningTitle, ...warnings]; - vsoLogIssue(warningsWithTitle.join("%0D%0A"), LogIssueType.Warning); - logMessage("ending group logging", LogLevel.EndGroup); - } - } -} - -/** - * Process the breaking change label artifacts. - * - * @param executionReport - The spec-gen-sdk execution report. - * @returns [flag of lable breaking change, breaking change label]. - */ -function getBreakingChangeInfo(executionReport: any): [boolean, string] { - let breakingChangeLabel = ""; - for (const packageInfo of executionReport.packages) { - breakingChangeLabel = packageInfo.breakingChangeLabel; - if (packageInfo.shouldLabelBreakingChange) { - return [true, breakingChangeLabel]; - } - } - return [false, breakingChangeLabel]; -} - -/** - * Process the breaking change label artifacts. - * @param commandInput - The command input. - * @param shouldLabelBreakingChange - A flag indicating whether to label breaking changes. - * @returns the run status code. 
- */ -function processBreakingChangeLabelArtifacts( - commandInput: SpecGenSdkCmdInput, - shouldLabelBreakingChange: boolean, - breakingChangeLabel: string, -): number { - const breakingChangeLabelArtifactName = "spec-gen-sdk-breaking-change-artifact"; - const breakingChangeLabelArtifactFileName = breakingChangeLabelArtifactName + ".json"; - const breakingChangeLabelArtifactPath = "out/breaking-change-label-artifact"; - const breakingChangeLabelArtifactAbsoluteFolder = path.join( - commandInput.workingFolder, - breakingChangeLabelArtifactPath, - ); - try { - if (!fs.existsSync(breakingChangeLabelArtifactAbsoluteFolder)) { - fs.mkdirSync(breakingChangeLabelArtifactAbsoluteFolder, { recursive: true }); - } - // Write breaking change label artifact - fs.writeFileSync( - path.join( - commandInput.workingFolder, - breakingChangeLabelArtifactPath, - breakingChangeLabelArtifactFileName, - ), - JSON.stringify({ - language: commandInput.sdkRepoName, - labelAction: shouldLabelBreakingChange, - }), - ); - setVsoVariable("BreakingChangeLabelArtifactName", breakingChangeLabelArtifactName); - setVsoVariable("BreakingChangeLabelArtifactPath", breakingChangeLabelArtifactPath); - setVsoVariable("BreakingChangeLabelAction", shouldLabelBreakingChange ? 
"add" : "remove"); - setVsoVariable("BreakingChangeLabel", breakingChangeLabel); - } catch (error) { - logMessage("Runner: errors occurred while processing breaking change", LogLevel.Group); - vsoLogIssue(`Runner: errors writing breaking change label artifacts:${error}`); - logMessage("ending group logging", LogLevel.EndGroup); - return 1; - } - return 0; + return statusCode; } diff --git a/eng/tools/spec-gen-sdk-runner/src/index.ts b/eng/tools/spec-gen-sdk-runner/src/index.ts index 9b1914d17d1e..a66fe5eaa8fe 100644 --- a/eng/tools/spec-gen-sdk-runner/src/index.ts +++ b/eng/tools/spec-gen-sdk-runner/src/index.ts @@ -1,4 +1,6 @@ import { exit } from "node:process"; +import path from "node:path"; +import { existsSync, mkdirSync } from "node:fs"; import { getArgumentValue } from "./utils.js"; import { generateSdkForBatchSpecs, @@ -11,15 +13,24 @@ export async function main() { const args: string[] = process.argv.slice(2); // Log the arguments to the console console.log("Arguments passed to the script:", args.join(" ")); - const runMode: string = getArgumentValue(args, "--rm", ""); + const batchType: string = getArgumentValue(args, "--batch-type", ""); const pullRequestNumber: string = getArgumentValue(args, "--pr-number", ""); + console.log("Current working directory:", process.cwd()); + const workingFolder: string = getArgumentValue(args, "--wf", path.join(process.cwd(), "..")); + const logFolder = path.join(workingFolder, "out/logs"); + if (!existsSync(logFolder)) { + mkdirSync(logFolder, { recursive: true }); + } let statusCode = 0; - if (runMode) { - statusCode = await generateSdkForBatchSpecs(runMode); + if (batchType) { + statusCode = await generateSdkForBatchSpecs(batchType); } else if (pullRequestNumber) { statusCode = await generateSdkForSpecPr(); } else { statusCode = await generateSdkForSingleSpec(); } + if (statusCode !== 0) { + console.log("##vso[task.complete result=Failed;]"); + } exit(statusCode); } diff --git 
a/eng/tools/spec-gen-sdk-runner/src/spec-helpers.ts b/eng/tools/spec-gen-sdk-runner/src/spec-helpers.ts new file mode 100644 index 000000000000..f79c7ce07248 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/src/spec-helpers.ts @@ -0,0 +1,487 @@ +import path from "node:path"; +import { + getChangedFiles, + searchRelatedParentFolders, + searchSharedLibrary, + searchRelatedTypeSpecProjectBySharedLibrary, + groupPathsByService, + createCombinedSpecs, + type SpecResults, + type ChangedSpecs, + type SpecConfigs, + getLastPathSegment, +} from "./utils.js"; +import { logMessage } from "./log.js"; +import { SpecGenSdkCmdInput } from "./types.js"; + +export const readmeMdRegex = /^readme.md$/; +export const typespecProjectRegex = /^tspconfig.yaml$/; +export const typespecProjectSharedLibraryRegex = /[^/]+\.Shared/; + +/** + * Processes typespec projects that follow the resource-manager or data-plane folder structure + * and matches them with corresponding readme files if they exist in the same folder. 
+ * @param readmeMDResult - Object mapping folder paths to readme file paths + * @param typespecProjectResult - Object mapping folder paths to typespec project file paths + * @returns An array of ChangedSpecs objects containing the paths to the readme and TypeSpec config files + */ +export function processTypeSpecProjectsV2FolderStructure( + readmeMDResult: { [folderPath: string]: string[] }, + typespecProjectResult: { [folderPath: string]: string[] }, +): ChangedSpecs[] { + const changedSpecs: ChangedSpecs[] = []; + + // Iterate through each typespec project folder + for (const folderPath of Object.keys(typespecProjectResult)) { + // Split the path into segments to check for specific components + const segments = folderPath.split(/[/\\]/); + // Check if the folder path contains resource-manager or data-plane segments + if (segments.includes("resource-manager") || segments.includes("data-plane")) { + const cs: ChangedSpecs = { + specs: [], + }; + + // Set the typespec project path + cs.typespecProject = path.join(folderPath, "tspconfig.yaml"); + // Initialize the specs array with typespec project files + cs.specs = [...typespecProjectResult[folderPath]]; // Check if the same folder has a readme.md file + if (readmeMDResult[folderPath]) { + cs.readmeMd = path.join(folderPath, "readme.md"); + // Merge the specs arrays, removing duplicates + cs.specs = [...new Set([...cs.specs, ...readmeMDResult[folderPath]])]; + // Remove the processed entry from readmeMDResult + delete readmeMDResult[folderPath]; + } + + // Add the ChangedSpecs object to the result array + changedSpecs.push(cs); + // Remove the processed entry from typespecProjectResult + delete typespecProjectResult[folderPath]; + + // Delete readme entries that match specific folder structure patterns and are in the same parent folder hierarchy + // such as: + // "specification/service/data-plane" + // "specification/service/resource-manager" + // "specification/service/resource-manager/Microsoft.Service" + for 
(const readmePath of Object.keys(readmeMDResult)) { + // Split the paths into segments to work with path components rather than raw strings with separators + const folderSegments = folderPath.split(/[/\\]/); // Split on either / or \ + const readmeSegments = readmePath.split(/[/\\]/); + + // Find the position of "resource-manager" or "data-plane" in folder segments + const rmIndex = folderSegments.indexOf("resource-manager"); + const dpIndex = folderSegments.indexOf("data-plane"); + + // For resource-manager paths + if (rmIndex !== -1) { + // Get the service path segments (everything before resource-manager) + const serviceSegments = folderSegments.slice(0, rmIndex); + // Check if readmePath shares the same service prefix + const isRelatedService = serviceSegments.every( + (segment, i) => i < readmeSegments.length && readmeSegments[i] === segment, + ); + + if (isRelatedService) { + // Case 1: Readme ends with resource-manager + // Example: specification/service/resource-manager + if ( + readmeSegments.length === rmIndex + 1 && + readmeSegments[rmIndex] === "resource-manager" + ) { + logMessage(`\t Removing related readme: ${readmePath} for folder: ${folderPath}`); + delete readmeMDResult[readmePath]; + continue; + } + + // Case 2: Readme is one level down from resource-manager + // Example: specification/service/resource-manager/Microsoft.Service + if ( + readmeSegments.length === rmIndex + 2 && + readmeSegments[rmIndex] === "resource-manager" && + folderSegments.length > rmIndex + 1 && + folderSegments[rmIndex + 1] === readmeSegments[rmIndex + 1] + ) { + logMessage(`\t Removing related readme: ${readmePath} for folder: ${folderPath}`); + delete readmeMDResult[readmePath]; + continue; + } + } + } + // For data-plane paths + else if (dpIndex !== -1) { + // Get the service path segments (everything before data-plane) + const serviceSegments = folderSegments.slice(0, dpIndex); + // Check if readmePath shares the same service prefix and ends with data-plane + const 
isRelatedService = serviceSegments.every( + (segment, i) => i < readmeSegments.length && readmeSegments[i] === segment, + ); + + if ( + isRelatedService && + readmeSegments.length === dpIndex + 1 && + readmeSegments[dpIndex] === "data-plane" + ) { + logMessage(`\t Removing related readme: ${readmePath} for folder: ${folderPath}`); + delete readmeMDResult[readmePath]; + } + } + } + } + } + + return changedSpecs; +} + +export function detectChangedSpecConfigFiles(commandInput: SpecGenSdkCmdInput): ChangedSpecs[] { + const prChangedFiles: string[] = getChangedFiles(commandInput.localSpecRepoPath) ?? []; + if (prChangedFiles.length === 0) { + logMessage("No files changed in the PR"); + } + const normalizedChangedFiles = prChangedFiles.map((f) => f.replaceAll("\\", "/")); + logMessage(`Changed files in the PR: ${normalizedChangedFiles.length}`); + for (const file of normalizedChangedFiles) { + logMessage(`\t${file}`); + } + const fileList = normalizedChangedFiles + .filter((p) => p.startsWith("specification/")) + .filter((p) => !p.includes("/scenarios/")); + + if (fileList.length === 0) { + logMessage("No relevant files changed under 'specification' folder in the PR"); + return []; + } + logMessage(`Related readme.md and typespec project list:`); + const changedSpecs: ChangedSpecs[] = []; + + const readmeMDResult = searchRelatedParentFolders(fileList, { + searchFileRegex: readmeMdRegex, + specRepoFolder: commandInput.localSpecRepoPath, + stopAtFolder: "specification", + }); + + const typespecProjectResult = searchRelatedParentFolders(fileList, { + searchFileRegex: typespecProjectRegex, + specRepoFolder: commandInput.localSpecRepoPath, + stopAtFolder: "specification", + }); + + const typespecProjectSharedLibraries = searchSharedLibrary(fileList, { + searchFileRegex: typespecProjectSharedLibraryRegex, + specRepoFolder: commandInput.localSpecRepoPath, + }); + + const typespecProjectResultSearchedBySharedLibrary = searchRelatedTypeSpecProjectBySharedLibrary( + 
typespecProjectSharedLibraries, + { + searchFileRegex: typespecProjectRegex, + specRepoFolder: commandInput.localSpecRepoPath, + }, + ); + + // Merge typespec project results + for (const folderPath of Object.keys(typespecProjectResultSearchedBySharedLibrary)) { + if (typespecProjectResult[folderPath]) { + typespecProjectResult[folderPath] = [ + ...typespecProjectResult[folderPath], + ...typespecProjectResultSearchedBySharedLibrary[folderPath], + ]; + } else { + typespecProjectResult[folderPath] = typespecProjectResultSearchedBySharedLibrary[folderPath]; + } + } + + // Process TypeSpec projects with the V2 folder structure + const newFolderStructureSpecs = processTypeSpecProjectsV2FolderStructure( + readmeMDResult, + typespecProjectResult, + ); + + if (newFolderStructureSpecs.length > 0) { + logMessage(`Found ${newFolderStructureSpecs.length} specs with the new folder structure`); + changedSpecs.push(...newFolderStructureSpecs); + for (const spec of newFolderStructureSpecs) { + logMessage(`\t\t tspconfig: ${spec.typespecProject}, readme: ${spec.readmeMd}`); + } + } + + // Process TypeSpec projects with the old folder structure + if (Object.keys(readmeMDResult).length > 0 && Object.keys(typespecProjectResult).length > 0) { + // Group paths by service + const serviceMap = groupPathsByService(readmeMDResult, typespecProjectResult); + + const results: SpecResults = { readmeMDResult, typespecProjectResult }; + + // Process each service + for (const [, info] of serviceMap) { + // Case: Resource Manager with .Management + if (info.managementPaths.length > 0) { + if (info.resourceManagerPaths.length === 1) { + // Single resource-manager path - match with all Management paths + const newSpecs = createCombinedSpecs( + info.resourceManagerPaths[0].path, + info.managementPaths, + results, + ); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${info.resourceManagerPaths[0].path}, tspconfig folders: ${info.managementPaths}`, + ); + for (const p of 
info.managementPaths) { + delete typespecProjectResult[p]; + } + delete readmeMDResult[info.resourceManagerPaths[0].path]; + } else { + // Multiple resource-manager paths - match by subfolder name + for (const rmPath of info.resourceManagerPaths) { + const matchingManagements = info.managementPaths.filter((mPath) => { + const rmSubPath = rmPath.subPath; + const managementName = getLastPathSegment(mPath).replace(".Management", ""); + return rmSubPath && rmSubPath === managementName; + }); + if (matchingManagements.length > 0) { + const newSpecs = createCombinedSpecs(rmPath.path, matchingManagements, results); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${rmPath.path}, tspconfig folders: ${matchingManagements}`, + ); + for (const p of matchingManagements) { + delete typespecProjectResult[p]; + } + delete readmeMDResult[rmPath.path]; + } + } + } + } + + // Case: Data Plane matching + if (info.dataPlanePaths.length > 0 && info.otherTypeSpecPaths.length > 0) { + if (info.dataPlanePaths.length === 1) { + // Single data-plane path - match with all non-Management TypeSpec paths + const newSpecs = createCombinedSpecs( + info.dataPlanePaths[0].path, + info.otherTypeSpecPaths, + results, + ); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${info.dataPlanePaths[0].path}, tspconfig folders: ${info.otherTypeSpecPaths}`, + ); + for (const p of info.otherTypeSpecPaths) { + delete typespecProjectResult[p]; + } + delete readmeMDResult[info.dataPlanePaths[0].path]; + } else { + // Multiple data-plane paths - match by subfolder name + for (const dpPath of info.dataPlanePaths) { + const matchingTypeSpecs = info.otherTypeSpecPaths.filter((tsPath) => { + const dpSubFolder = dpPath.subFolder; + const tsLastSegment = getLastPathSegment(tsPath); + return dpSubFolder && dpSubFolder === tsLastSegment; + }); + if (matchingTypeSpecs.length > 0) { + const newSpecs = createCombinedSpecs(dpPath.path, matchingTypeSpecs, results); + 
changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${dpPath.path}, tspconfig folders: ${matchingTypeSpecs}`, + ); + for (const p of matchingTypeSpecs) { + delete typespecProjectResult[p]; + } + delete readmeMDResult[dpPath.path]; + } + } + } + } + } + } + + // Process remaining unmatched paths + for (const folderPath of new Set([ + ...Object.keys(readmeMDResult), + ...Object.keys(typespecProjectResult), + ])) { + const cs: ChangedSpecs = { + specs: [], + }; + + if (typespecProjectResult[folderPath]) { + cs.specs = typespecProjectResult[folderPath]; + cs.typespecProject = path.join(folderPath, "tspconfig.yaml"); + logMessage(`\t tspconfig: ${cs.typespecProject}`); + } else { + cs.readmeMd = path.join(folderPath, "readme.md"); + cs.specs = readmeMDResult[folderPath]; + logMessage(`\t readme: ${cs.readmeMd}`); + } + + changedSpecs.push(cs); + } + + return changedSpecs; +} + +/** + * Grouping spec configs by service based on the provided TypeSpec configs and readme files. + * @param tspconfigs - Array of TypeSpec config file paths. + * @param readmes - Array of readme file paths. + * @param skipUnmatchedReadme - Flag to skip unmatched readme files. + * @returns An array of SpecConfigs objects containing the paths to the readme and TypeSpec config files. + */ +export function groupSpecConfigPaths( + tspconfigs?: string[], + readmes?: string[], + skipUnmatchedReadme: boolean = false, +): SpecConfigs[] { + const emptyArray: string[] = []; + const safeTspConfigs = tspconfigs ?? emptyArray; + const safeReadmes = readmes ?? 
emptyArray; + + // Quick return for simple cases + if (safeTspConfigs.length === 0 && safeReadmes.length === 0) { + return []; + } else if (safeTspConfigs.length > 0 && safeReadmes.length === 0) { + return safeTspConfigs.map((c) => ({ tspconfigPath: c })); + } else if (safeReadmes.length > 0 && safeTspConfigs.length === 0) { + return safeReadmes.map((c) => ({ readmePath: c })); + } + + // Get folder paths from spec configs + const tspconfigFolderPaths: string[] = safeTspConfigs.map((p) => + p.slice(0, Math.max(0, p.lastIndexOf("/"))), + ); + const readmeFolderPaths: string[] = safeReadmes.map((p) => + p.slice(0, Math.max(0, p.lastIndexOf("/"))), + ); + + // Create plain objects to satisfy the input of groupPathsByService + const tspconfigFolderMap: { [folderPath: string]: string[] } = {}; + for (const folderPath of tspconfigFolderPaths) { + tspconfigFolderMap[folderPath] = []; + } + + const readmeFolderMap: { [folderPath: string]: string[] } = {}; + for (const folderPath of readmeFolderPaths) { + readmeFolderMap[folderPath] = []; + } + + // Group paths by service + const serviceMap = groupPathsByService(readmeFolderMap, tspconfigFolderMap); + + const changedSpecs: ChangedSpecs[] = []; + const results: SpecResults = { + readmeMDResult: readmeFolderMap, + typespecProjectResult: tspconfigFolderMap, + }; + + // Process each service + for (const [, info] of serviceMap) { + // Case: Resource Manager with .Management + if (info.managementPaths.length > 0) { + if (info.resourceManagerPaths.length === 1) { + // Single resource-manager path - match with all Management paths + const newSpecs = createCombinedSpecs( + info.resourceManagerPaths[0].path, + info.managementPaths, + results, + ); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${info.resourceManagerPaths[0].path}, tspconfig folders: ${info.managementPaths}`, + ); + for (const p of info.managementPaths) { + delete tspconfigFolderMap[p]; + } + delete 
readmeFolderMap[info.resourceManagerPaths[0].path]; + } else { + // Multiple resource-manager paths - match by subfolder name + for (const rmPath of info.resourceManagerPaths) { + const matchingManagements = info.managementPaths.filter((mPath) => { + const rmSubPath = rmPath.subPath; + const managementName = getLastPathSegment(mPath).replace(".Management", ""); + return rmSubPath && rmSubPath === managementName; + }); + if (matchingManagements.length > 0) { + const newSpecs = createCombinedSpecs(rmPath.path, matchingManagements, results); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${rmPath.path}, tspconfig folders: ${matchingManagements}`, + ); + for (const p of matchingManagements) { + delete tspconfigFolderMap[p]; + } + delete readmeFolderMap[rmPath.path]; + } + } + } + } + + // Case: Data Plane matching + if (info.dataPlanePaths.length > 0 && info.otherTypeSpecPaths.length > 0) { + if (info.dataPlanePaths.length === 1) { + // Single data-plane path - match with all non-Management TypeSpec paths + const newSpecs = createCombinedSpecs( + info.dataPlanePaths[0].path, + info.otherTypeSpecPaths, + results, + ); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${info.dataPlanePaths[0].path}, tspconfig folders: ${info.otherTypeSpecPaths}`, + ); + for (const p of info.otherTypeSpecPaths) { + delete tspconfigFolderMap[p]; + } + delete readmeFolderMap[info.dataPlanePaths[0].path]; + } else { + // Multiple data-plane paths - match by subfolder name + for (const dpPath of info.dataPlanePaths) { + const matchingTypeSpecs = info.otherTypeSpecPaths.filter((tsPath) => { + const dpSubFolder = dpPath.subFolder; + const tsLastSegment = getLastPathSegment(tsPath); + return dpSubFolder && dpSubFolder === tsLastSegment; + }); + if (matchingTypeSpecs.length > 0) { + const newSpecs = createCombinedSpecs(dpPath.path, matchingTypeSpecs, results); + changedSpecs.push(...newSpecs); + logMessage( + `\t readme folders: ${dpPath.path}, tspconfig 
folders: ${matchingTypeSpecs}`, + ); + for (const p of matchingTypeSpecs) { + delete tspconfigFolderMap[p]; + } + delete readmeFolderMap[dpPath.path]; + } + } + } + } + } + + // Process remaining unmatched paths + for (const folderPath of new Set([ + ...Object.keys(readmeFolderMap), + ...Object.keys(tspconfigFolderMap), + ])) { + const cs: ChangedSpecs = { + specs: [], + }; + + if (tspconfigFolderMap[folderPath]) { + cs.specs = tspconfigFolderMap[folderPath]; + cs.typespecProject = path.join(folderPath, "tspconfig.yaml"); + changedSpecs.push(cs); + logMessage(`\t tspconfig: ${cs.typespecProject}`); + } else if (!skipUnmatchedReadme) { + cs.readmeMd = path.join(folderPath, "readme.md"); + cs.specs = readmeFolderMap[folderPath]; + changedSpecs.push(cs); + logMessage(`\t readme: ${cs.readmeMd}`); + } + } + + // Map ChangedSpecs to SpecConfigs + const specConfigs = changedSpecs.map((cs) => ({ + readmePath: cs.readmeMd, + tspconfigPath: cs.typespecProject, + })); + return specConfigs; +} diff --git a/eng/tools/spec-gen-sdk-runner/src/types.ts b/eng/tools/spec-gen-sdk-runner/src/types.ts index 49faca574049..e50d63947b77 100644 --- a/eng/tools/spec-gen-sdk-runner/src/types.ts +++ b/eng/tools/spec-gen-sdk-runner/src/types.ts @@ -3,12 +3,13 @@ */ export interface SpecGenSdkCmdInput { workingFolder: string; - isTriggeredByPipeline: string; + runMode: string; localSpecRepoPath: string; localSdkRepoPath: string; tspConfigPath?: string; readmePath?: string; sdkRepoName: string; + sdkLanguage: string; apiVersion?: string; prNumber?: string; sdkReleaseType?: string; @@ -18,6 +19,14 @@ export interface SpecGenSdkCmdInput { headBranch?: string; } +/** + * Data for the API view request. + */ +export interface APIViewRequestData { + packageName: string; + filePath: string; +} + /* * VsoLogs is a map of task names to log entries. Each log entry contains an array of errors and warnings. 
*/ @@ -28,3 +37,66 @@ export type VsoLogs = Map< warnings?: string[]; } >; + +/** + * Represents the result of the spec-gen-sdk generation process. + */ +export interface SpecGenSdkArtifactInfo { + language: string; + result: string; + prNumber?: string; + labelAction?: boolean; + isSpecGenSdkCheckRequired: boolean; + apiViewRequestData: APIViewRequestData[]; +} + +/** + * Represents supported SDK language identifiers. + */ +export type SdkName = + | "azure-sdk-for-go" + | "azure-sdk-for-java" + | "azure-sdk-for-js" + | "azure-sdk-for-net" + | "azure-sdk-for-python"; + +/** + * Represents the plane types for SDK generation settings + */ +export interface PlaneTypeSettings { + /** + * Whether spec-gen-sdk check is required for data plane + */ + dataPlane: boolean; + + /** + * Whether spec-gen-sdk check is required for management plane + */ + managementPlane: boolean; +} + +/** + * Required check settings for all languages. + */ +export const SpecGenSdkRequiredSettings: Record = { + "azure-sdk-for-go": { + dataPlane: true, + managementPlane: true, + }, + "azure-sdk-for-java": { + dataPlane: false, + managementPlane: true, + }, + "azure-sdk-for-js": { + dataPlane: false, + managementPlane: true, + }, + "azure-sdk-for-net": { + dataPlane: false, + managementPlane: false, + }, + "azure-sdk-for-python": { + dataPlane: true, + managementPlane: true, + }, +}; diff --git a/eng/tools/spec-gen-sdk-runner/src/utils.ts b/eng/tools/spec-gen-sdk-runner/src/utils.ts index 60d506869305..32d01a70384b 100644 --- a/eng/tools/spec-gen-sdk-runner/src/utils.ts +++ b/eng/tools/spec-gen-sdk-runner/src/utils.ts @@ -120,11 +120,20 @@ export function getAllTypeSpecPaths(specRepoPath: string): string[] { } } -/* - * Run the PowerShell script +/** + * Runs a PowerShell script with the given arguments. + * Automatically detects the correct executable path for the current platform (Windows/Linux/macOS). + * Logs errors and warnings as appropriate. 
*/ export function runPowerShellScript(args: string[]): string | undefined { - const result = spawnSync("/usr/bin/pwsh", args, { encoding: "utf8" }); + const pwshPath = getPwshExecutablePath(); + if (!pwshPath) { + logMessage("No valid PowerShell executable found on this system.", LogLevel.Error); + return undefined; + } + + const result = spawnSync(pwshPath, args, { encoding: "utf8" }); + if (result.error) { logMessage(`Error executing PowerShell script:${result.error}`, LogLevel.Error); return undefined; @@ -135,13 +144,42 @@ export function runPowerShellScript(args: string[]): string | undefined { return result.stdout?.trim(); } +/** + * Determines the appropriate PowerShell executable path for the current OS. + * Prefers 'pwsh' (PowerShell Core) over 'powershell' (Windows PowerShell). + * Returns undefined if no executable is found. + */ +function getPwshExecutablePath(): string | undefined { + const isWindows = process.platform === "win32"; + const candidates = isWindows ? ["pwsh.exe", "powershell.exe"] : ["pwsh"]; // Linux/macOS generally only support pwsh + for (const cmd of candidates) { + if (isCommandAvailable(cmd)) { + return cmd; + } + } + return undefined; +} + +/** + * Checks if a command is available in PATH by trying to spawn it with '--version'. 
+ */ +function isCommandAvailable(command: string): boolean { + try { + const result = spawnSync(command, ["--version"], { encoding: "utf8" }); + return result.status === 0; + } catch { + return false; + } +} + // Function to call Get-ChangedFiles from PowerShell script export function getChangedFiles( specRepoPath: string, baseCommitish: string = "HEAD^", targetCommitish: string = "HEAD", - diffFilter: string = "d", ): string[] | undefined { + // set diff filter to include added, copied, modified, deleted, renamed, and type changed files + const diffFilter = "ACMDRT"; const scriptPath = path.resolve(specRepoPath, "eng/scripts/ChangedFiles-Functions.ps1"); const args = [ "-Command", @@ -186,7 +224,9 @@ export function findParentWithFile( return undefined; } currentPath = path.dirname(currentPath); - if (stopAtFolder && currentPath === stopAtFolder) { + // Check if we've reached the root of the path (stopAtFolder) or + // if we've reached '.' which prevents infinite loops with path.dirname('.') + if ((stopAtFolder && currentPath === stopAtFolder) || currentPath === ".") { return undefined; } } @@ -290,7 +330,7 @@ export function searchRelatedTypeSpecProjectBySharedLibrary( continue; } - const peerPath = path.join(parentDir, peerDir.name); + const peerPath = normalizePath(path.join(parentDir, peerDir.name)); try { const peerFiles = fs.readdirSync(path.resolve(options.specRepoFolder, peerPath)); if (peerFiles.some((file) => options.searchFileRegex.test(file.toLowerCase()))) { @@ -370,7 +410,7 @@ export function groupPathsByService( } const info = serviceMap.get(serviceName)!; - if (folderPath.endsWith(".Management")) { + if (folderPath.endsWith(".Management") || folderPath.includes("resource-manager")) { info.managementPaths.push(folderPath); } else { info.otherTypeSpecPaths.push(folderPath); @@ -401,6 +441,11 @@ export type ChangedSpecs = { specs: string[]; }; +export type SpecConfigs = { + readmePath?: string; + tspconfigPath?: string; +}; + /** * Creates combined 
specs from readme and typespec paths * @param readmePath - Path to the readme file @@ -444,3 +489,14 @@ export function objectToMap(obj: Record): Map { } return map; } + +/** + * Normalizes a Windows-style path by converting backslashes (`\`) to slashes (`/`) + * Only performs conversion on Windows systems. No effect on Linux/macOS. + */ +export function normalizePath(p: string): string { + if (process.platform === "win32") { + return p.replaceAll("\\", "/"); + } + return p; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/__snapshots__/commands.test.ts.snap b/eng/tools/spec-gen-sdk-runner/test/__snapshots__/commands.test.ts.snap new file mode 100644 index 000000000000..693b4d10a9db --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/__snapshots__/commands.test.ts.snap @@ -0,0 +1,54 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`generateSdkForBatchSpecs > should generate SDKs for all specs successfully 1`] = ` +[ + [ + "/working/folder/out/logs/generation-summary.md", + " +## Batch Run Type + all-specs +## Successful Specs in the Generation Process +typespec1,typespec2,typespec3,typespec4, +## Total Successful Specs + 4 +## Total Specs Count + 4 + +", + ], +] +`; + +exports[`generateSdkForBatchSpecs > should handle empty spec paths gracefully 1`] = ` +[ + [ + "/working/folder/out/logs/generation-summary.md", + " +## Batch Run Type + all-specs +## Total Specs Count + 0 + +", + ], +] +`; + +exports[`generateSdkForBatchSpecs > should handle errors during SDK generation 1`] = ` +[ + [ + "/working/folder/out/logs/generation-summary.md", + " +## Batch Run Type + all-specs +## Spec Failures in the Generation Process +typespec1,typespec2, +## Total Failed Specs + 2 +## Total Specs Count + 2 + +", + ], +] +`; diff --git a/eng/tools/spec-gen-sdk-runner/test/command-helpers.test.ts b/eng/tools/spec-gen-sdk-runner/test/command-helpers.test.ts new file mode 100644 index 000000000000..9da7002439cd --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/command-helpers.test.ts @@ -0,0 +1,615 @@ +import { describe, test, expect, vi, beforeEach } from "vitest"; +import * as log from "../src/log.js"; +import * as utils from "../src/utils.js"; +import * as specHelpers from "../src/spec-helpers.js"; +import { fileURLToPath } from "node:url"; +import fs from "node:fs"; +import path from "node:path"; +import { + getBreakingChangeInfo, + getRequiredSettingValue, + getSpecPaths, + logIssuesToPipeline, + parseArguments, + prepareSpecGenSdkCommand, + generateArtifact, + setPipelineVariables, +} from "../src/command-helpers.js"; +import { LogLevel } from "../src/log.js"; +import { APIViewRequestData } from "../src/types.js"; + +// Get the absolute path to the repo root +const currentFilePath = fileURLToPath(import.meta.url); +const repoRoot = path.resolve(path.dirname(currentFilePath), "../fixtures/"); + +describe("commands.ts", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe("setPipelineVariables", () => { + test("should set pipeline variables correctly", () => { + vi.spyOn(log, "setVsoVariable").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + setPipelineVariables( + "path-to-artifact", + false, + "sdk/security/keyvault/azcertificates", + "Configurations: 'specification/contosowidgetmanager/resource-manager/readme.md', and CommitSHA: 'commitsha', in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs'", + ); + + expect(log.setVsoVariable).toHaveBeenCalledWith("StagedArtifactsFolder", "path-to-artifact"); + expect(log.setVsoVariable).toHaveBeenCalledWith( + "PrBranch", + "sdkauto/sdk-security/keyvault/azcertificates", + ); + expect(log.setVsoVariable).toHaveBeenCalledWith( + "PrTitle", + "[AutoPR sdk/security/keyvault/azcertificates]", + ); + expect(log.setVsoVariable).toHaveBeenCalledWith( + "PrBody", + "Configurations: 'specification/contosowidgetmanager/resource-manager/readme.md', and CommitSHA: 
'commitsha', in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs'", + ); + }); + test("should skip PR related variable settings correctly", () => { + vi.spyOn(log, "setVsoVariable").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + setPipelineVariables("path-to-artifact"); + + expect(log.setVsoVariable).toHaveBeenCalledOnce(); + expect(log.setVsoVariable).toHaveBeenCalledWith("StagedArtifactsFolder", "path-to-artifact"); + }); + }); + + describe("parseArguments", () => { + test("runMode is release when it has no batch-type and no pr-number", () => { + const mockArgs = [ + "--scp", + path.normalize( + `${repoRoot}specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"`, + ), + "--lang", + "azure-sdk-for-go", + ]; + vi.spyOn(process, "argv", "get").mockReturnValue(["node", "script", ...mockArgs]); + const result = parseArguments(); + expect(result.localSpecRepoPath).toBe( + path.normalize( + `${repoRoot}specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"`, + ), + ); + expect(result.sdkRepoName).toBe("azure-sdk-for-go"); + expect(result.prNumber).toBe(""); + expect(result.runMode).toBe("release"); + }); + + test("runMode is release when it has pr-number", () => { + const mockArgs = [ + "--scp", + path.normalize( + `${repoRoot}specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"`, + ), + "--pr-number", + "1234", + ]; + vi.spyOn(process, "argv", "get").mockReturnValue(["node", "script", ...mockArgs]); + + const result = parseArguments(); + + expect(result.localSpecRepoPath).toBe( + path.normalize( + `${repoRoot}specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"`, + ), + ); + 
expect(result.sdkRepoName).toBe("azure-sdk-for-net"); + expect(result.prNumber).toBe("1234"); + expect(result.runMode).toBe("spec-pull-request"); + }); + + test("runMode is batch when it has batch-type", () => { + const mockArgs = [ + "--scp", + path.normalize( + `${repoRoot}specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"`, + ), + "--batch-type", + "all-specs", + ]; + vi.spyOn(process, "argv", "get").mockReturnValue(["node", "script", ...mockArgs]); + + const result = parseArguments(); + + expect(result.localSpecRepoPath).toBe( + path.normalize( + `${repoRoot}specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json"`, + ), + ); + expect(result.sdkRepoName).toBe("azure-sdk-for-net"); + expect(result.prNumber).toBe(""); + expect(result.runMode).toBe("batch"); + }); + }); + + describe("prepareSpecGenSdkCommand", () => { + test("should prepare the command correctly", () => { + const commandInput = { + localSpecRepoPath: "/spec/path", + localSdkRepoPath: "/sdk/path", + workingFolder: "/working/folder", + sdkRepoName: "azure-sdk-for-js", + specCommitSha: "abc123", + runMode: "release", + specRepoHttpsUrl: "https://github.com/spec", + prNumber: "123", + tspConfigPath: "config/path", + readmePath: "readme/path", + headRepoHttpsUrl: "https://github.com/head", + headBranch: "main", + apiVersion: "2021-01-01", + sdkReleaseType: "beta", + sdkLanguage: "typescript", + }; + + const result = prepareSpecGenSdkCommand(commandInput); + + expect(result).toContain("spec-gen-sdk"); + expect(result).toContain("--scp"); + expect(result).toContain("/spec/path"); + expect(result).toContain("--pr-number"); + expect(result).toContain("123"); + }); + }); + + describe("getSpecPaths", () => { + test("should return both TypeSpec and readme paths for 'all-specs' batch type", () => { + vi.spyOn(utils, 
"getAllTypeSpecPaths").mockReturnValue(["typespec1", "typespec2"]); + vi.spyOn(utils, "findReadmeFiles").mockReturnValue(["readme1", "readme2"]); + vi.spyOn(specHelpers, "groupSpecConfigPaths").mockReturnValue([ + { tspconfigPath: "typespec1", readmePath: undefined }, + { tspconfigPath: "typespec2", readmePath: undefined }, + { tspconfigPath: undefined, readmePath: "readme1" }, + { tspconfigPath: undefined, readmePath: "readme2" }, + ]); + + const result = getSpecPaths("all-specs", "/spec/path"); + + expect(utils.getAllTypeSpecPaths).toHaveBeenCalledWith("/spec/path"); + expect(utils.findReadmeFiles).toHaveBeenCalledWith(path.join("/spec/path", "specification")); + expect(specHelpers.groupSpecConfigPaths).toHaveBeenCalledWith( + ["typespec1", "typespec2"], + ["readme1", "readme2"], + false, + ); + expect(result).toHaveLength(4); + }); + + test("should return only readme paths for 'all-openapis' batch type", () => { + vi.spyOn(utils, "findReadmeFiles").mockReturnValue(["readme1", "readme2"]); + vi.spyOn(specHelpers, "groupSpecConfigPaths").mockReturnValue([ + { tspconfigPath: undefined, readmePath: "readme1" }, + { tspconfigPath: undefined, readmePath: "readme2" }, + ]); + + const result = getSpecPaths("all-openapis", "/spec/path"); + + expect(utils.findReadmeFiles).toHaveBeenCalledWith(path.join("/spec/path", "specification")); + expect(specHelpers.groupSpecConfigPaths).toHaveBeenCalledWith( + [], + ["readme1", "readme2"], + false, + ); + expect(result).toHaveLength(2); + }); + + test("should return both TypeSpec and readme paths for 'all-typespecs' batch type", () => { + vi.spyOn(utils, "getAllTypeSpecPaths").mockReturnValue(["typespec1", "typespec2"]); + vi.spyOn(utils, "findReadmeFiles").mockReturnValue(["readme1", "readme2"]); + vi.spyOn(specHelpers, "groupSpecConfigPaths").mockReturnValue([ + { tspconfigPath: "typespec1", readmePath: undefined }, + { tspconfigPath: "typespec2", readmePath: undefined }, + { tspconfigPath: undefined, readmePath: "readme1" }, + { 
tspconfigPath: undefined, readmePath: "readme2" }, + ]); + + const result = getSpecPaths("all-typespecs", "/spec/path"); + + expect(utils.getAllTypeSpecPaths).toHaveBeenCalledWith("/spec/path"); + expect(utils.findReadmeFiles).toHaveBeenCalledWith(path.join("/spec/path", "specification")); + expect(specHelpers.groupSpecConfigPaths).toHaveBeenCalledWith( + ["typespec1", "typespec2"], + ["readme1", "readme2"], + true, + ); + expect(result).toHaveLength(4); + }); + + test("should return sample TypeSpec paths for 'sample-typespecs' batch type", () => { + vi.spyOn(utils, "getAllTypeSpecPaths").mockReturnValue(["typespec1", "typespec2"]); + vi.spyOn(specHelpers, "groupSpecConfigPaths").mockReturnValue([ + { + tspconfigPath: "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + readmePath: undefined, + }, + { + tspconfigPath: "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + readmePath: undefined, + }, + ]); + + const result = getSpecPaths("sample-typespecs", "/spec/path"); + + expect(specHelpers.groupSpecConfigPaths).toHaveBeenCalledWith( + [ + "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + ], + [], + false, + ); + expect(result).toHaveLength(2); + }); + + test("should return management plane TypeSpec and resource-manager readme paths for 'all-mgmtplane-typespecs'", () => { + const managementTypespecs = ["typespec1.Management", "typespec2.Management"]; + const resourceManagerReadmes = ["resource-manager/readme-rm1", "resource-manager/readme-rm2"]; + + vi.spyOn(utils, "getAllTypeSpecPaths").mockReturnValue([ + ...managementTypespecs, + "typespec3", + "typespec4", + ]); + vi.spyOn(utils, "findReadmeFiles").mockReturnValue([ + ...resourceManagerReadmes, + "readme-dp1", + "readme-dp2", + ]); + vi.spyOn(specHelpers, "groupSpecConfigPaths").mockReturnValue([ + { tspconfigPath: "typespec1.Management", readmePath: 
"resource-manager/readme-rm1" }, + { tspconfigPath: "typespec2.Management", readmePath: "resource-manager/readme-rm2" }, + ]); + + const result = getSpecPaths("all-mgmtplane-typespecs", "/spec/path"); + + expect(utils.getAllTypeSpecPaths).toHaveBeenCalledWith("/spec/path"); + expect(utils.findReadmeFiles).toHaveBeenCalledWith(path.join("/spec/path", "specification")); + expect(specHelpers.groupSpecConfigPaths).toHaveBeenCalledWith( + managementTypespecs, + resourceManagerReadmes, + true, + ); + expect(result).toHaveLength(2); + }); + + test("should return data plane TypeSpec and data-plane readme paths for 'all-dataplane-typespecs'", () => { + const dataPlaneTypespecs = ["typespec3", "typespec4"]; + const dataPlaneReadmes = ["data-plane/readme-dp1", "data-plane/readme-dp2"]; + + vi.spyOn(utils, "getAllTypeSpecPaths").mockReturnValue([ + ...dataPlaneTypespecs, + "typespec1.Management", + "typespec2.Management", + ]); + vi.spyOn(utils, "findReadmeFiles").mockReturnValue([ + ...dataPlaneReadmes, + "readme-rm1", + "readme-rm2", + ]); + vi.spyOn(specHelpers, "groupSpecConfigPaths").mockReturnValue([ + { tspconfigPath: "typespec3", readmePath: undefined }, + { tspconfigPath: "typespec4", readmePath: undefined }, + { tspconfigPath: undefined, readmePath: "data-plane/readme-dp1" }, + { tspconfigPath: undefined, readmePath: "data-plane/readme-dp2" }, + ]); + + const result = getSpecPaths("all-dataplane-typespecs", "/spec/path"); + + expect(utils.getAllTypeSpecPaths).toHaveBeenCalledWith("/spec/path"); + expect(utils.findReadmeFiles).toHaveBeenCalledWith(path.join("/spec/path", "specification")); + expect(specHelpers.groupSpecConfigPaths).toHaveBeenCalledWith( + dataPlaneTypespecs, + dataPlaneReadmes, + true, + ); + expect(result).toHaveLength(4); + }); + }); + + describe("logIssuesToPipeline", () => { + test("should log errors and warnings to pipeline", () => { + const mockLogContent = { + key1: { errors: ["error1"], warnings: ["warning1"] }, + key2: { errors: ["error2"], 
warnings: [] }, + }; + vi.spyOn(fs, "readFileSync").mockReturnValue(JSON.stringify(mockLogContent)); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "vsoLogIssue").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + logIssuesToPipeline("/log/path", "spec config"); + + expect(log.logMessage).toHaveBeenCalledWith( + "Errors occurred while generating SDK from spec config. Follow the steps at https://aka.ms/azsdk/sdk-automation-faq#how-to-view-the-detailed-sdk-generation-errors to view detailed errors.", + LogLevel.Group, + ); + expect(log.vsoLogIssue).toHaveBeenCalledWith( + "Errors occurred while generating SDK from spec config. Follow the steps at https://aka.ms/azsdk/sdk-automation-faq#how-to-view-the-detailed-sdk-generation-errors to view detailed errors.%0D%0Aerror1%0D%0Aerror2", + ); + }); + + test("should not log when there is no log message", () => { + const mockLogPath = "/log/path"; + const mockLogError = "ENOENT: no such file"; + vi.spyOn(fs, "readFileSync").mockImplementationOnce(() => { + throw new Error(mockLogError); + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "vsoLogIssue").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + expect(() => { + logIssuesToPipeline("/log/path", "spec config"); + }).toThrow(`Runner: error reading log at ${mockLogPath}:Error: ${mockLogError}`); + expect(log.logMessage).not.toHaveBeenCalled(); + expect(log.vsoLogIssue).not.toHaveBeenCalled(); + }); + }); + + describe("getBreakingChangeInfo", () => { + test("should return breaking change info if applicable", () => { + const mockExecutionReport = { + packages: [{ shouldLabelBreakingChange: true }], + }; + + const result = getBreakingChangeInfo(mockExecutionReport); + + expect(result).toBe(true); + }); + + test("should return no breaking 
change info if not applicable", () => { + const mockExecutionReport = { + packages: [{ shouldLabelBreakingChange: false }], + }; + + const result = getBreakingChangeInfo(mockExecutionReport); + + expect(result).toBe(false); + }); + + test("should return no breaking change info if not executionReport", () => { + const mockExecutionReport = { + packages: [], + }; + + const result = getBreakingChangeInfo(mockExecutionReport); + + expect(result).toBe(false); + }); + }); + + describe("generateArtifact", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + test("should generate artifact successfully", () => { + vi.spyOn(fs, "existsSync").mockReturnValue(false); + vi.spyOn(fs, "mkdirSync").mockImplementation(() => undefined); + vi.spyOn(fs, "writeFileSync").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "setVsoVariable").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + // No need to mock getRequiredSettingValue for this test + // We'll just verify the output structure instead + + const mockCommandInput = { + workingFolder: "/working/folder", + sdkLanguage: "azure-sdk-for-js", + runMode: "", + localSpecRepoPath: "", + localSdkRepoPath: "", + sdkRepoName: "", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + const mockResult = "succeeded"; + const mockhasBreakingChange = false; + const mockhasManagementPlaneSpecs = false; + const mockStagedArtifactsFolder = "mockStagedArtifactsFolder"; + const mockApiViewRequestData: APIViewRequestData[] = []; + const result = generateArtifact( + mockCommandInput, + mockResult, + mockhasBreakingChange, + mockhasManagementPlaneSpecs, + mockStagedArtifactsFolder, + mockApiViewRequestData, + ); + + const breakingChangeLabelArtifactPath = path.normalize( + "/working/folder/out/spec-gen-sdk-artifact", + ); + + expect(result).toBe(0); + expect(fs.mkdirSync).toHaveBeenCalledWith(breakingChangeLabelArtifactPath, { + recursive: true, + }); + // Since 
we're not mocking getRequiredSettingValue properly in this test, + // we'll just verify the output contains the expected isSpecGenSdkCheckRequired value + expect(fs.writeFileSync).toHaveBeenCalledWith( + path.join(breakingChangeLabelArtifactPath, "spec-gen-sdk-artifact.json"), + JSON.stringify( + { + language: "azure-sdk-for-js", + result: "succeeded", + labelAction: false, + isSpecGenSdkCheckRequired: false, + apiViewRequestData: [], + }, + undefined, + 2, + ), + ); + expect(log.setVsoVariable).toHaveBeenCalledWith( + "SpecGenSdkArtifactName", + "spec-gen-sdk-artifact", + ); + expect(log.setVsoVariable).toHaveBeenCalledWith( + "SpecGenSdkArtifactPath", + "out/spec-gen-sdk-artifact", + ); + + expect(log.setVsoVariable).toHaveBeenCalledWith( + "StagedArtifactsFolder", + "mockStagedArtifactsFolder", + ); + expect(log.setVsoVariable).toHaveBeenCalledWith("HasAPIViewArtifact", "false"); + }); + + test("should handle errors during artifact generation", () => { + vi.spyOn(fs, "existsSync").mockReturnValue(false); + vi.spyOn(fs, "mkdirSync").mockImplementation(() => { + throw new Error("mkdir failed"); + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "vsoLogIssue").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const mockCommandInput = { + workingFolder: "/working/folder", + sdkLanguage: "javascript", + runMode: "", + localSpecRepoPath: "", + localSdkRepoPath: "", + sdkRepoName: "", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + const mockResult = "failed"; + const mockhasBreakingChange = false; + const mockhasManagementPlaneSpecs = false; + const mockStagedArtifactsFolder = ""; + const mockApiViewRequestData: APIViewRequestData[] = []; + const result = generateArtifact( + mockCommandInput, + mockResult, + mockhasBreakingChange, + mockhasManagementPlaneSpecs, + mockStagedArtifactsFolder, + mockApiViewRequestData, + ); + + 
expect(result).toBe(1); + expect(log.logMessage).toHaveBeenCalledWith("ending group logging", LogLevel.EndGroup); + }); + + test("should set isSpecGenSdkCheckRequired to false when sdkGenerationExecuted is false", () => { + vi.spyOn(fs, "existsSync").mockReturnValue(false); + vi.spyOn(fs, "mkdirSync").mockImplementation(() => undefined); + vi.spyOn(fs, "writeFileSync").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "setVsoVariable").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + // Mock getRequiredSettingValue to verify it's not called when sdkGenerationExecuted is false + const getRequiredSettingValueSpy = vi.spyOn( + { getRequiredSettingValue }, + "getRequiredSettingValue", + ); + + const mockCommandInput = { + workingFolder: "/working/folder", + sdkLanguage: "azure-sdk-for-go", + runMode: "", + localSpecRepoPath: "", + localSdkRepoPath: "", + sdkRepoName: "", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + const mockResult = "succeeded"; + const mockhasBreakingChange = false; + // Using true for hasManagementPlaneSpecs, which would normally make isSpecGenSdkCheckRequired=true + // for Go SDK (as tested in the getRequiredSettingValue tests) + const mockhasManagementPlaneSpecs = true; + const mockStagedArtifactsFolder = "mockStagedArtifactsFolder"; + const mockApiViewRequestData: APIViewRequestData[] = []; + + // Explicitly passing false for sdkGenerationExecuted + const result = generateArtifact( + mockCommandInput, + mockResult, + mockhasBreakingChange, + mockhasManagementPlaneSpecs, + mockStagedArtifactsFolder, + mockApiViewRequestData, + false, // sdkGenerationExecuted = false + ); + + const breakingChangeLabelArtifactPath = path.normalize( + "/working/folder/out/spec-gen-sdk-artifact", + ); + + expect(result).toBe(0); + // Verify getRequiredSettingValue was not called + expect(getRequiredSettingValueSpy).not.toHaveBeenCalled(); + + // Verify 
isSpecGenSdkCheckRequired is false in the written file + expect(fs.writeFileSync).toHaveBeenCalledWith( + path.join(breakingChangeLabelArtifactPath, "spec-gen-sdk-artifact.json"), + JSON.stringify( + { + language: "azure-sdk-for-go", + result: "succeeded", + labelAction: false, + isSpecGenSdkCheckRequired: false, // This should be false when sdkGenerationExecuted is false + apiViewRequestData: [], + }, + undefined, + 2, + ), + ); + }); + }); + + describe("getRequiredSettingValue", () => { + test("should return managementPlane setting when hasManagementPlaneSpecs is true", () => { + const result = getRequiredSettingValue(true, "azure-sdk-for-go"); + // Based on the constants in types.ts, Go SDK requires check for management plane + expect(result).toBe(true); + + const result2 = getRequiredSettingValue(true, "azure-sdk-for-net"); + // Based on the constants in types.ts, .NET SDK does not require check for management plane + expect(result2).toBe(false); + }); + + test("should return dataPlane setting when hasManagementPlaneSpecs is false", () => { + const result = getRequiredSettingValue(false, "azure-sdk-for-go"); + // Based on the constants in types.ts, Go SDK requires check for data plane + expect(result).toBe(true); + + const result2 = getRequiredSettingValue(false, "azure-sdk-for-js"); + // Based on the constants in types.ts, JS SDK does not require check for data plane + expect(result2).toBe(false); + }); + }); +}); diff --git a/eng/tools/spec-gen-sdk-runner/test/commands.test.ts b/eng/tools/spec-gen-sdk-runner/test/commands.test.ts new file mode 100644 index 000000000000..a10c890d766a --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/commands.test.ts @@ -0,0 +1,617 @@ +import { describe, test, expect, vi, beforeEach, type Mock } from "vitest"; +import * as utils from "../src/utils.js"; +import { + generateSdkForBatchSpecs, + generateSdkForSingleSpec, + generateSdkForSpecPr, +} from "../src/commands.js"; +import * as commandHelpers from 
"../src/command-helpers.js"; +import * as log from "../src/log.js"; +import * as changeFiles from "../src/spec-helpers.js"; +import fs from "node:fs"; +import path from "node:path"; +import { LogLevel } from "../src/log.js"; + +function getNormalizedFsCalls(mockFn: Mock): unknown[][] { + return mockFn.mock.calls.map((args: unknown[]) => { + const [filePath, ...rest] = args; + return [String(filePath).replaceAll("\\", "/"), ...rest]; + }); +} + +describe("generateSdkForSingleSpec", () => { + beforeEach(() => { + vi.resetAllMocks(); + }); + + test("should execute the SDK generation command successfully", async () => { + const mockCommandInput = { + tspConfigPath: "path/to/tspconfig.yaml", + readmePath: "path/to/readme.md", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "release", + localSdkRepoPath: "/sdk/path", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + const mockExecutionReport = { + executionResult: "succeeded", + stagedArtifactsFolder: "path/to/artifacts", + packages: [ + { + packageName: "test-package", + installationInstructions: "npm install test-package", + }, + ], + vsoLogPath: "path/to/log", + }; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + vi.spyOn(commandHelpers, "getExecutionReport").mockReturnValue(mockExecutionReport); + vi.spyOn(commandHelpers, "setPipelineVariables").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(commandHelpers, "logIssuesToPipeline").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(utils, "runSpecGenSdkCommand").mockResolvedValue(undefined); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + vi.spyOn(fs, "readFileSync").mockReturnValue( + 
JSON.stringify({ + executionResult: "succeeded", + packages: [{ packageName: "test-package", installationInstructions: "install" }], + }), + ); + vi.spyOn(log, "setVsoVariable").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(utils, "runSpecGenSdkCommand").mockResolvedValueOnce(undefined); + + const result = await generateSdkForSingleSpec(); + expect(result).toBe(0); + expect(log.logMessage).toHaveBeenCalledWith( + `Generating SDK from ${mockCommandInput.tspConfigPath} ${mockCommandInput.readmePath}`, + LogLevel.Group, + ); + expect(log.logMessage).toHaveBeenCalledWith("Runner command executed successfully"); + expect(commandHelpers.setPipelineVariables).toHaveBeenCalledWith( + "path/to/artifacts", + false, + "test-package", + "npm install test-package", + ); + expect(commandHelpers.logIssuesToPipeline).toHaveBeenCalledWith( + mockExecutionReport.vsoLogPath, + `${mockCommandInput.tspConfigPath} ${mockCommandInput.readmePath}`, + ); + }); + + test("should handle errors during SDK generation", async () => { + const mockCommandInput = { + tspConfigPath: "path/to/tspconfig.yaml", + readmePath: "path/to/readme.md", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + localSdkRepoPath: "/sdk/path", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + vi.spyOn(commandHelpers, "logIssuesToPipeline").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(commandHelpers, "setPipelineVariables").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(utils, "runSpecGenSdkCommand").mockRejectedValue(new Error("Command failed")); + vi.spyOn(log, "logMessage").mockImplementation(() => { + 
// mock implementation intentionally left blank + }); + + const result = await generateSdkForSingleSpec(); + + expect(result).toBe(1); + expect(utils.runSpecGenSdkCommand).toHaveBeenCalled(); + expect(utils.runSpecGenSdkCommand).toHaveBeenCalledWith(["mock-command"]); + expect(log.logMessage).toHaveBeenCalledWith( + `Runner: error executing command:Error: Command failed`, + LogLevel.Error, + ); + expect(commandHelpers.setPipelineVariables).not.toHaveBeenCalled(); + }); + + test("should handle errors during execution report reading", async () => { + const mockCommandInput = { + tspConfigPath: "path/to/tspconfig.yaml", + readmePath: "path/to/readme.md", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + localSdkRepoPath: "/sdk/path", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + vi.spyOn(commandHelpers, "logIssuesToPipeline").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(commandHelpers, "setPipelineVariables").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(commandHelpers, "getExecutionReport").mockImplementation(() => { + throw new Error("Failed to read execution report"); + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const statusCode = await generateSdkForSingleSpec(); + + expect(statusCode).toBe(1); + expect(log.logMessage).toHaveBeenCalledWith( + "Runner: error reading execution-report.json:Error: Failed to read execution report", + LogLevel.Error, + ); + }); +}); + +describe("generateSdkForSpecPr", () => { + beforeEach(() => { + vi.resetAllMocks(); + }); + + test("should execute the SDK generation command for 
changed specs successfully", async () => { + const mockCommandInput = { + localSdkRepoPath: "path/to/local/repo", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + const mockChangedSpecs = [ + { + specs: [ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + ], + typespecProject: "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + readmeMd: "specification/contosowidgetmanager/resource-manager/readme.md", + }, + ]; + const mockExecutionReport = { + executionResult: "succeeded", + packages: [], + vsoLogPath: "path/to/log", + }; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + vi.spyOn(changeFiles, "detectChangedSpecConfigFiles").mockReturnValue(mockChangedSpecs); + vi.spyOn(utils, "resetGitRepo").mockResolvedValue(undefined); + vi.spyOn(utils, "runSpecGenSdkCommand").mockResolvedValue(undefined); + vi.spyOn(commandHelpers, "getExecutionReport").mockReturnValue(mockExecutionReport); + vi.spyOn(commandHelpers, "getBreakingChangeInfo").mockReturnValue(false); + vi.spyOn(commandHelpers, "generateArtifact").mockReturnValue(0); + vi.spyOn(commandHelpers, "logIssuesToPipeline").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const statusCode = await generateSdkForSpecPr(); + const serviceFolderPath = commandHelpers.getServiceFolderPath( + mockChangedSpecs[0].typespecProject, + ); + expect(statusCode).toBe(0); + expect(log.logMessage).toHaveBeenCalledWith( + `Generating SDK from ${serviceFolderPath}`, + LogLevel.Group, + ); + 
expect(log.logMessage).toHaveBeenCalledWith(`Runner command executed successfully`); + expect(log.logMessage).toHaveBeenCalledWith( + `Runner command execution result:${mockExecutionReport.executionResult}`, + ); + expect(log.logMessage).toHaveBeenCalledWith("ending group logging", LogLevel.EndGroup); + expect(commandHelpers.logIssuesToPipeline).toHaveBeenCalledWith( + mockExecutionReport.vsoLogPath, + serviceFolderPath, + ); + expect(commandHelpers.generateArtifact).toHaveBeenCalledWith( + mockCommandInput, + "succeeded", // overallExecutionResult + false, // overallRunHasBreakingChange + true, // hasManagementPlaneSpecs + "", // stagedArtifactsFolder + [], // apiViewRequestData + true, // sdkGenerationExecuted + ); + }); + + test("should handle the case when there are no changed specs", async () => { + // Set up mocks + const mockCommandInput = { + localSdkRepoPath: "path/to/local/repo", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "spec-pull-request", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + // Return empty array for changedSpecs + vi.spyOn(changeFiles, "detectChangedSpecConfigFiles").mockReturnValue([]); + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + + // Spy on generateArtifact to verify parameters + const generateArtifactSpy = vi.spyOn(commandHelpers, "generateArtifact").mockReturnValue(0); + + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const statusCode = await generateSdkForSpecPr(); + + expect(statusCode).toBe(0); + // Verify runSpecGenSdkCommand is not called when there are no changed specs + expect(utils.runSpecGenSdkCommand).not.toHaveBeenCalled(); + // Verify correct parameters are passed to generateArtifact + 
expect(generateArtifactSpy).toHaveBeenCalledWith( + mockCommandInput, + "succeeded", // overallExecutionResult should be set to "succeeded" + false, // overallRunHasBreakingChange + false, // hasManagementPlaneSpecs + "", // stagedArtifactsFolder + [], // apiViewRequestData + false, // sdkGenerationExecuted should be set to false + ); + }); + + test("should skip specs with no valid config files", async () => { + const mockCommandInput = { + localSdkRepoPath: "path/to/local/repo", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + const mockChangedSpecs = [{ specs: [] }]; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(changeFiles, "detectChangedSpecConfigFiles").mockReturnValue(mockChangedSpecs); + vi.spyOn(commandHelpers, "generateArtifact").mockReturnValue(0); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const statusCode = await generateSdkForSpecPr(); + + expect(statusCode).toBe(0); + expect(log.logMessage).toHaveBeenCalledWith( + "Runner: no spec config file found in the changed files", + LogLevel.Warn, + ); + expect(commandHelpers.generateArtifact).toHaveBeenCalledWith( + mockCommandInput, + "", // overallExecutionResult is empty because no spec was actually processed + false, // overallRunHasBreakingChange + false, // hasManagementPlaneSpecs + "", // stagedArtifactsFolder + [], // apiViewRequestData + true, // sdkGenerationExecuted is true because there were some changed specs but they had no valid config + ); + }); + + test("should handle errors during fail run runSpecGenSdkCommand for a changed spec", async () => { + const mockCommandInput = { + localSdkRepoPath: "path/to/local/repo", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + sdkRepoName: 
"azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + const mockChangedSpecs = [ + { + specs: [ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + ], + typespecProject: "path/to/tspconfig.yaml", + readmeMd: "path/to/readme.md", + }, + ]; + + const mockExecutionReport = { + executionResult: "succeeded", + vsoLogPath: "path/to/log", + }; + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + vi.spyOn(changeFiles, "detectChangedSpecConfigFiles").mockReturnValue(mockChangedSpecs); + vi.spyOn(utils, "runSpecGenSdkCommand").mockRejectedValue(new Error("Command failed")); + vi.spyOn(utils, "resetGitRepo").mockImplementation(() => Promise.resolve()); + vi.spyOn(commandHelpers, "getExecutionReport").mockReturnValue(mockExecutionReport); + vi.spyOn(commandHelpers, "logIssuesToPipeline").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const statusCode = await generateSdkForSpecPr(); + + expect(statusCode).toBe(1); + expect(log.logMessage).toHaveBeenCalledWith( + "Runner: error executing command:Error: Command failed", + LogLevel.Error, + ); + }); + + test("should handle errors during execution report reading for a changed spec", async () => { + const mockCommandInput = { + localSdkRepoPath: "path/to/local/repo", + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + const mockChangedSpecs = [ + { + specs: [ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + 
], + typespecProject: "path/to/tspconfig.yaml", + readmeMd: "path/to/readme.md", + }, + ]; + + vi.spyOn(commandHelpers, "logIssuesToPipeline").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockCommandInput); + vi.spyOn(commandHelpers, "prepareSpecGenSdkCommand").mockReturnValue(["mock-command"]); + vi.spyOn(changeFiles, "detectChangedSpecConfigFiles").mockReturnValue(mockChangedSpecs); + vi.spyOn(utils, "runSpecGenSdkCommand").mockResolvedValue(undefined); + vi.spyOn(utils, "resetGitRepo").mockImplementation(() => Promise.resolve()); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(commandHelpers, "getExecutionReport").mockImplementation(() => { + throw new Error("Failed to read execution report"); + }); + + const statusCode = await generateSdkForSpecPr(); + + expect(statusCode).toBe(1); + expect(log.logMessage).toHaveBeenCalledWith( + "Runner: error reading execution-report.json:Error: Failed to read execution report", + LogLevel.Error, + ); + }); +}); + +describe("generateSdkForBatchSpecs", () => { + beforeEach(() => { + vi.resetAllMocks(); + }); + + test("should handle empty spec paths gracefully", async () => { + const mockBatchType = "all-specs"; + const mockInput = { + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + localSdkRepoPath: "/sdk/path", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockInput); + vi.spyOn(commandHelpers, "getSpecPaths").mockReturnValue([]); + vi.spyOn(utils, "runSpecGenSdkCommand").mockImplementation(() => Promise.resolve()); + vi.spyOn(utils, "resetGitRepo").mockImplementation(() => Promise.resolve()); + vi.spyOn(fs, "existsSync").mockReturnValue(false); + vi.spyOn(fs, 
"writeFileSync").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "vsoAddAttachment").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const code = await generateSdkForBatchSpecs(mockBatchType); + expect(commandHelpers.getSpecPaths).toHaveBeenCalledWith(mockBatchType, "/spec/path"); + expect(code).toBe(0); + expect(utils.runSpecGenSdkCommand).not.toHaveBeenCalled(); + expect(utils.resetGitRepo).not.toHaveBeenCalled(); + const markdownFilePath = path.normalize( + path.join(mockInput.workingFolder, "out/logs/generation-summary.md"), + ); + expect(log.logMessage).toHaveBeenCalledWith( + `Runner: markdown file written to ${markdownFilePath}`, + ); + expect(log.vsoAddAttachment).toHaveBeenCalledWith("Generation Summary", markdownFilePath); + + const calls = getNormalizedFsCalls(fs.writeFileSync as Mock); + expect(calls).toMatchSnapshot(); + }); + + test("should generate SDKs for all specs successfully", async () => { + const mockBatchType = "all-specs"; + const mockSpecPaths = [ + { + tspconfigPath: "typespec1", + readmePath: "readme1", + }, + { + tspconfigPath: "typespec2", + readmePath: "readme2", + }, + { + tspconfigPath: "typespec3", + readmePath: "readme3", + }, + { + tspconfigPath: "typespec4", + readmePath: "readme4", + }, + ]; + const mockExecutionReport = { + executionResult: "succeeded", + stagedArtifactsFolder: "path/to/artifacts", + packages: [], + }; + const mockInput = { + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + localSdkRepoPath: "/sdk/path", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockInput); + vi.spyOn(commandHelpers, "getSpecPaths").mockReturnValue(mockSpecPaths); + 
vi.spyOn(utils, "resetGitRepo").mockResolvedValue(undefined); + vi.spyOn(utils, "runSpecGenSdkCommand").mockResolvedValue(undefined); + vi.spyOn(fs, "readFileSync").mockReturnValue(JSON.stringify(mockExecutionReport)); + vi.spyOn(fs, "existsSync").mockReturnValue(false); + vi.spyOn(fs, "writeFileSync").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "vsoAddAttachment").mockImplementation(() => { + // mock implementation intentionally left blank + }); + + const result = await generateSdkForBatchSpecs(mockBatchType); + expect(result).toBe(0); + expect(commandHelpers.getSpecPaths).toHaveBeenCalledWith(mockBatchType, "/spec/path"); + expect(utils.runSpecGenSdkCommand).toHaveBeenCalledTimes(mockSpecPaths.length); + expect(commandHelpers.setPipelineVariables).toHaveBeenCalledWith("path/to/artifacts"); + const markdownFilePath = path.normalize( + path.join(mockInput.workingFolder, "out/logs/generation-summary.md"), + ); + expect(log.logMessage).toHaveBeenCalledWith( + `Runner: markdown file written to ${markdownFilePath}`, + ); + expect(log.vsoAddAttachment).toHaveBeenCalledWith("Generation Summary", markdownFilePath); + + const calls = getNormalizedFsCalls(fs.writeFileSync as Mock); + expect(calls).toMatchSnapshot(); + }); + + test("should handle errors during SDK generation", async () => { + const mockBatchType = "all-specs"; + const mockSpecPaths = [ + { + tspconfigPath: "typespec1", + readmePath: "readme1", + }, + { + tspconfigPath: "typespec2", + readmePath: "readme2", + }, + ]; + const mockExecutionReport = { + executionResult: "failed", + packages: [], + }; + const mockInput = { + localSpecRepoPath: "/spec/path", + workingFolder: "/working/folder", + runMode: "batch", + localSdkRepoPath: "/sdk/path", + sdkRepoName: "azure-sdk-for-js", + sdkLanguage: "javascript", + specCommitSha: "", + specRepoHttpsUrl: 
"", + }; + vi.spyOn(commandHelpers, "parseArguments").mockReturnValue(mockInput); + + vi.spyOn(commandHelpers, "getSpecPaths").mockReturnValue(mockSpecPaths); + vi.spyOn(utils, "resetGitRepo").mockResolvedValue(undefined); + vi.spyOn(utils, "runSpecGenSdkCommand") + .mockRejectedValueOnce(new Error("Command failed")) + .mockResolvedValueOnce(undefined); + vi.spyOn(fs, "readFileSync").mockReturnValue(JSON.stringify(mockExecutionReport)); + vi.spyOn(fs, "existsSync").mockReturnValue(false); + vi.spyOn(fs, "writeFileSync").mockImplementation(() => { + // mock implementation intentionally left blank + }); + const logSpy = vi.spyOn(log, "logMessage").mockImplementation(() => { + // mock implementation intentionally left blank + }); + vi.spyOn(log, "vsoAddAttachment").mockImplementation(() => { + // mock implementation intentionally left blank + }); + const result = await generateSdkForBatchSpecs(mockBatchType); + + expect(result).toBe(1); + expect(utils.runSpecGenSdkCommand).toHaveBeenCalledTimes(mockSpecPaths.length); + expect(logSpy).toHaveBeenCalledTimes(11); + const markdownFilePath = path.normalize( + path.join(mockInput.workingFolder, "out/logs/generation-summary.md"), + ); + expect(logSpy).toHaveBeenNthCalledWith( + 1, + `Generating SDK from ${mockSpecPaths[0].tspconfigPath}`, + "group", + ); + expect(logSpy).toHaveBeenNthCalledWith( + 3, + "Runner: error executing command:Error: Command failed", + LogLevel.Error, + ); + expect(logSpy).toHaveBeenNthCalledWith(5, "ending group logging", "endgroup"); + expect(logSpy).toHaveBeenNthCalledWith( + 6, + `Generating SDK from ${mockSpecPaths[1].tspconfigPath}`, + "group", + ); + expect(logSpy).toHaveBeenCalledWith(`Runner: markdown file written to ${markdownFilePath}`); + expect(log.vsoAddAttachment).toHaveBeenCalledWith("Generation Summary", markdownFilePath); + + const calls = getNormalizedFsCalls(fs.writeFileSync as Mock); + expect(calls).toMatchSnapshot(); + logSpy.mockRestore(); + }); +}); diff --git 
a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.Management/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.Management/main.tsp new file mode 100644 index 000000000000..01df2ce5748a --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.Management/main.tsp @@ -0,0 +1,35 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "@azure-tools/typespec-azure-resource-manager"; +import "./employee.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; +using Azure.Core; +using Azure.ResourceManager; + +/** Microsoft.Contoso Resource Provider management API. */ +@armProviderNamespace +@service(#{ title: "Microsoft.Contoso management service" }) +@versioned(Microsoft.Contoso.Versions) +namespace Microsoft.Contoso; + +/** The available API versions. */ +enum Versions { + /** 2021-10-01-preview version */ + @useDependency(Azure.ResourceManager.Versions.v1_0_Preview_1) + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @armCommonTypesVersion(Azure.ResourceManager.CommonTypes.Versions.v5) + v2021_10_01_preview: "2021-10-01-preview", + + /** 2021-11-01 version */ + @useDependency(Azure.ResourceManager.Versions.v1_0_Preview_1) + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @armCommonTypesVersion(Azure.ResourceManager.CommonTypes.Versions.v5) + v2021_11_01: "2021-11-01", +} + +interface Operations extends Azure.ResourceManager.Operations {} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml new file mode 100644 index 000000000000..f89effe0153c --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml @@ -0,0 +1,14 @@ +parameters: + "service-dir": + default: "sdk/contoso" +emit: + - "@azure-tools/typespec-autorest" +options: + "@azure-tools/typespec-autorest": + use-read-only-status-schema: true + emitter-output-dir: "{project-root}/.." + azure-resource-provider-folder: "resource-manager" + output-file: "{azure-resource-provider-folder}/{service-name}/{version-status}/{version}/contoso.json" +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/resource-manager" diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp new file mode 100644 index 000000000000..1b94bb705031 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp @@ -0,0 +1,8 @@ +@doc("Faked shared model") +model FakedSharedModel { + @doc("The tag.") + tag: string; + + @doc("The created date.") + createdAt: utcDateTime; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/client.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/client.tsp new file mode 100644 index 000000000000..c731eeb01215 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/client.tsp @@ -0,0 +1,7 @@ +import "./main.tsp"; +import "@azure-tools/typespec-client-generator-core"; + +using Azure.Contoso.WidgetManager; +using Azure.ClientGenerator.Core; + +@@clientName(Widgets, "ContosoWidgets", "csharp"); diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/examples/2022-11-01-preview/.gitkeep 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/examples/2022-11-01-preview/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/main.tsp new file mode 100644 index 000000000000..3caf321e31e4 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/main.tsp @@ -0,0 +1,65 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "../Contoso.WidgetManager.Shared"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; +using Azure.Core; + +@useAuth(AadOauth2Auth<["https://contoso.azure.com/.default"]>) +@service(#{ title: "Contoso Widget Manager" }) +@versioned(Contoso.WidgetManager.Versions) +namespace Azure.Contoso.WidgetManager; + +@doc("Versions info.") +enum Versions { + @doc("The 2022-11-01-preview version.") + @useDependency(Azure.Core.Versions.v1_0_Preview_1) + v2022_11_01_Preview: "2022-11-01-preview", + + @doc("The 2022-12-01 version.") + @useDependency(Azure.Core.Versions.v1_0_Preview_1) + v2022_12_01: "2022-12-01", +} + +@doc("A widget.") +@resource("widgets") +model WidgetSuite { + @key("widgetName") + @doc("The widget name.") + @visibility(Lifecycle.Read) + name: string; + + @doc("The ID of the widget's manufacturer.") + manufacturerId: string; + + @doc("The faked shared model.") + sharedModel?: FakedSharedModel; +} + +interface Widgets { + @doc("Fetch a Widget by name.") + getWidget is ResourceRead; + + @doc("Gets status of a Widget operation.") + getWidgetOperationStatus is GetResourceOperationStatus; + + @doc("Creates or updates a Widget asynchronously.") + 
@pollingOperation(Widgets.getWidgetOperationStatus) + createOrUpdateWidget is StandardResourceOperations.LongRunningResourceCreateOrUpdate; + + @doc("Delete a Widget asynchronously.") + @pollingOperation(Widgets.getWidgetOperationStatus) + deleteWidget is LongRunningResourceDelete; + + @doc("List Widget resources") + listWidgets is ResourceList< + WidgetSuite, + { + parameters: StandardListQueryParameters; + } + >; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml new file mode 100644 index 000000000000..b1bbd98facce --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml @@ -0,0 +1,18 @@ +parameters: + "service-dir": + default: "sdk/contosowidgetmanager" + "dependencies": + "additionalDirectories": + - "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/" + default: "" +emit: + - "@azure-tools/typespec-autorest" +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/data-plane" +options: + "@azure-tools/typespec-autorest": + azure-resource-provider-folder: "data-plane" + emit-lro-options: "none" + emitter-output-dir: "{project-root}/.." 
+ output-file: "{azure-resource-provider-folder}/{service-name}/{version-status}/{version}/widgets.json" diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json new file mode 100644 index 000000000000..c5066b76f9b9 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json @@ -0,0 +1,525 @@ +{ + "swagger": "2.0", + "info": { + "title": "Contoso Widget Manager", + "version": "2022-11-01-preview", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "AadOauth2Auth": [ + "https://contoso.azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AadOauth2Auth": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "https://contoso.azure.com/.default": "" + }, + "tokenUrl": "https://login.microsoftonline.com/common/oauth2/token" + } + }, + "tags": [], + "paths": { + "/widgets": { + "get": { + "operationId": "Widgets_ListWidgets", + "description": "List Widget resources", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/PagedWidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + 
"headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/widgets/{widgetName}": { + "get": { + "operationId": "Widgets_GetWidget", + "description": "Fetch a Widget by name.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + } + }, + "patch": { + "operationId": "Widgets_CreateOrUpdateWidget", + "description": "Creates or updates a Widget asynchronously.", + "consumes": [ + "application/merge-patch+json" + ], + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "resource", + "in": "body", + "description": "The resource instance.", + "required": true, + "schema": { + "$ref": "#/definitions/WidgetSuiteCreateOrUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." 
+ } + } + }, + "201": { + "description": "The request has succeeded and a new resource has been created as a result.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "operationId": "Widgets_DeleteWidget", + "description": "Delete a Widget asynchronously.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "202": { + "description": "The request has been accepted for processing, but processing has not yet completed.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + } + }, + "required": [ + "id", + "status" + ] + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/widgets/{widgetName}/operations/{operationId}": { + "get": { + "operationId": "Widgets_GetWidgetOperationStatus", + "description": "Gets status of a Widget operation.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "operationId", + "in": "path", + "description": "The unique ID of the operation.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + }, + "result": { + "$ref": "#/definitions/WidgetSuite", + "description": "The result of the operation." + } + }, + "required": [ + "id", + "status" + ] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + } + } + } + }, + "definitions": { + "Azure.Core.Foundations.Error": { + "type": "object", + "description": "The error object.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + }, + "target": { + "type": "string", + "description": "The target of the error." + }, + "details": { + "type": "array", + "description": "An array of details about specific errors that led to this reported error.", + "items": { + "$ref": "#/definitions/Azure.Core.Foundations.Error" + }, + "x-ms-identifiers": [] + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "An object containing more specific information than the current object about the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "Azure.Core.Foundations.ErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "Azure.Core.Foundations.InnerError": { + "type": "object", + "description": "An object containing more specific information about the error. As per Microsoft One API guidelines - https://github.com/Microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "Inner error." 
+ } + } + }, + "Azure.Core.Foundations.OperationState": { + "type": "string", + "description": "Enum describing allowed operation states.", + "enum": [ + "NotStarted", + "Running", + "Succeeded", + "Failed", + "Canceled" + ], + "x-ms-enum": { + "name": "OperationState", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "The operation has not started." + }, + { + "name": "Running", + "value": "Running", + "description": "The operation is in progress." + }, + { + "name": "Succeeded", + "value": "Succeeded", + "description": "The operation has completed successfully." + }, + { + "name": "Failed", + "value": "Failed", + "description": "The operation has failed." + }, + { + "name": "Canceled", + "value": "Canceled", + "description": "The operation has been canceled by the user." + } + ] + } + }, + "FakedSharedModel": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." + } + }, + "required": [ + "tag", + "createdAt" + ] + }, + "FakedSharedModelCreateOrUpdate": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." 
+ } + } + }, + "PagedWidgetSuite": { + "type": "object", + "description": "Paged collection of WidgetSuite items", + "properties": { + "value": { + "type": "array", + "description": "The WidgetSuite items on this page", + "items": { + "$ref": "#/definitions/WidgetSuite" + }, + "x-ms-identifiers": [] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "WidgetSuite": { + "type": "object", + "description": "A widget.", + "properties": { + "name": { + "type": "string", + "description": "The widget name.", + "readOnly": true + }, + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModel", + "description": "The faked shared model." + } + }, + "required": [ + "name", + "manufacturerId" + ] + }, + "WidgetSuiteCreateOrUpdate": { + "type": "object", + "description": "A widget.", + "properties": { + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModelCreateOrUpdate", + "description": "The faked shared model." 
+ } + } + } + }, + "parameters": { + "Azure.Core.Foundations.ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-client-name": "apiVersion" + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/data-plane/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/data-plane/readme.md new file mode 100644 index 000000000000..49ccc3455366 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/data-plane/readme.md @@ -0,0 +1,54 @@ +# Contoso.WidgetManager + +> see https://aka.ms/autorest + +This is the AutoRest configuration file for Contoso.WidgetManager. + +## Configuration + +### Basic Information + +This is a TypeSpec project so we only want to readme to default the default tag and point to the outputted swagger file. +This is used for some tools such as doc generation and swagger apiview generation it isn't used for SDK code gen as we +use the native TypeSpec code generation configured in the tspconfig.yaml file. + +```yaml +openapi-type: data-plane +tag: package-2022-11-01-preview +``` + +### Tag: package-2022-11-01-preview + +These settings apply only when `--tag=package-2022-11-01-preview` is specified on the command line. + +```yaml $(tag) == 'package-2022-11-01-preview' +input-file: + - Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. 
+ +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Suppress rules that might be fixed + +These set of linting rules we expect to fixed in typespec-autorest emitter but for now suppressing. +Github issue filed at https://github.com/Azure/typespec-azure/issues/2762 + +```yaml +suppressions: + - code: LroExtension + - code: SchemaTypeAndFormat + - code: PathParameterSchema +``` diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json new file mode 100644 index 000000000000..d017cbcbf9f3 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/contoso.json @@ -0,0 +1,531 @@ +{ + "swagger": "2.0", + "info": { + "title": "Microsoft.Contoso management service", + "version": "2021-10-01-preview", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + 
}, + { + "name": "Employees" + } + ], + "paths": { + "/providers/Microsoft.Contoso/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + "Operations" + ], + "description": "List the operations for the provider", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + "Employees" + ], + "description": "List Employee resources by 
resource group", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Contoso/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": 
"./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + "Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": 
"../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json new file mode 100644 index 000000000000..9ac7910eb3f7 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json new 
file mode 100644 index 000000000000..3ee7ff5b9c4f --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-10-01-preview", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/readme.md new file mode 100644 index 000000000000..17dee03ed41e --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/contosowidgetmanager/resource-manager/readme.md @@ -0,0 +1,39 @@ +# containerstorage + +> see https://aka.ms/autorest +This is the AutoRest configuration file for Contoso. 
+ +## Getting Started + +To build the SDKs for My API, simply install AutoRest via `npm` (`npm install -g autorest`) and then run: + +> `autorest readme.md` +To see additional help and options, run: + +> `autorest --help` +For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. + +--- + +## Configuration + +### Basic Information + +These are the global settings for the containerstorage. + +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-10-01-preview +``` + +### Tag: package-2021-10-01-preview + +These settings apply only when `--tag=package-2021-10-01-preview` is specified on the command line. + +```yaml $(tag) == 'package-2021-10-01-preview' +input-file: + - Microsoft.Contoso/preview/2021-10-01-preview/contoso.json +``` + +--- diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/client.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/client.tsp new file mode 100644 index 000000000000..be91eaf5c748 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/client.tsp @@ -0,0 +1,7 @@ +import "./main.tsp"; +import "@azure-tools/typespec-client-generator-core"; + +using Widget; +using Azure.ClientGenerator.Core; + +@@clientName(Widgets, "AzureWidgets", "csharp"); diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_CreateOrUpdateWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_CreateOrUpdateWidgetSample.json new file mode 100644 index 
000000000000..dbd333fb52dc --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_CreateOrUpdateWidgetSample.json @@ -0,0 +1,37 @@ +{ + "title": "Widgets_CreateOrUpdateWidget", + "operationId": "Widgets_CreateOrUpdateWidget", + "parameters": { + "widgetName": "name1", + "api-version": "2022-12-01", + "resource": { + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "responses": { + "200": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "201": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_DeleteWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_DeleteWidgetSample.json new file mode 100644 index 000000000000..c5de7719085d --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_DeleteWidgetSample.json @@ -0,0 +1,29 @@ +{ + "operationId": "Widgets_DeleteWidget", + "title": "Delete widget by widget name using long-running operation.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "202": { + "headers": { + "location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123/result?api-version=2022-12-01", + "operation-location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123?api-version=2022-12-01" + }, + "body": { + "id": "id1", + "status": "deleted" + } + }, + "default": 
{ + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_GetWidgetOperationStatusSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_GetWidgetOperationStatusSample.json new file mode 100644 index 000000000000..a840b19f26e6 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_GetWidgetOperationStatusSample.json @@ -0,0 +1,45 @@ +{ + "title": "Widgets_GetWidgetOperationStatus", + "operationId": "Widgets_GetWidgetOperationStatus", + "parameters": { + "widgetName": "name1", + "operationId": "opreation id1", + "api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "id": "opreation id1", + "status": "InProgress", + "error": { + "code": "Error code", + "message": "Error message", + "target": "op1", + "details": [ + { + "code": "code1", + "message": "message1", + "target": "op1", + "details": [], + "innererror": { + "code": "code1" + } + } + ], + "innererror": { + "code": "code1" + } + }, + "result": { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + }, + "widgetName": "rfazvwnfwwomiwrh" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_GetWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_GetWidgetSample.json new file mode 100644 index 000000000000..ecab18c65303 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_GetWidgetSample.json @@ -0,0 +1,25 @@ +{ + "operationId": 
"Widgets_GetWidget", + "title": "Get widget by widget name.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "200": { + "body": { + "name": "bingsearch", + "manufacturerId": "a-22-01" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_ListWidgetsSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_ListWidgetsSample.json new file mode 100644 index 000000000000..fa68ab418490 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/examples/2022-12-01/Widgets_ListWidgetsSample.json @@ -0,0 +1,27 @@ +{ + "title": "Widgets_ListWidgets", + "operationId": "Widgets_ListWidgets", + "parameters": { + "top": 8, + "skip": 15, + "maxpagesize": 27, + "api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/main.tsp new file mode 100644 index 000000000000..a30b708fdb62 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/main.tsp @@ -0,0 +1,61 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "./shared.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; +using Azure.Core; + 
+@useAuth(AadOauth2Auth<["https://azure.com/.default"]>) +@service(#{ title: "Widget" }) +@versioned(Widget.Versions) +namespace Widget; + +@doc("Versions info.") +enum Versions { + @doc("The 2022-12-01 version.") + @useDependency(Azure.Core.Versions.v1_0_Preview_1) + v2022_12_01: "2022-12-01", +} + +@doc("A widget.") +@resource("widgets") +model WidgetSuite { + @key("widgetName") + @doc("The widget name.") + @visibility(Lifecycle.Read) + name: string; + + @doc("The ID of the widget's manufacturer.") + manufacturerId: string; + + @doc("The faked shared model.") + sharedModel?: FakedSharedModel; +} + +interface Widgets { + @doc("Fetch a Widget by name.") + getWidget is ResourceRead; + + @doc("Gets status of a Widget operation.") + getWidgetOperationStatus is GetResourceOperationStatus; + + @doc("Creates or updates a Widget asynchronously.") + @pollingOperation(Widgets.getWidgetOperationStatus) + createOrUpdateWidget is StandardResourceOperations.LongRunningResourceCreateOrUpdate; + + @doc("Delete a Widget asynchronously.") + @pollingOperation(Widgets.getWidgetOperationStatus) + deleteWidget is LongRunningResourceDelete; + + @doc("List Widget resources") + listWidgets is ResourceList< + WidgetSuite, + { + parameters: StandardListQueryParameters; + } + >; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/readme.md new file mode 100644 index 000000000000..6daa7d4c93ee --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/readme.md @@ -0,0 +1,78 @@ +# Widget + +> see https://aka.ms/autorest + +This is the AutoRest configuration file for Widget. + +## Configuration + +### Basic Information + +This is a TypeSpec project so we only want to readme to default the default tag and point to the outputted swagger file. 
+This is used for some tools such as doc generation and swagger apiview generation it isn't used for SDK code gen as we +use the native TypeSpec code generation configured in the tspconfig.yaml file. + +```yaml +openapi-type: data-plane +tag: package-2022-12-01 +``` + +### Tag: package-2022-12-01 + +These settings apply only when `--tag=package-2022-12-01` is specified on the command line. + +```yaml $(tag) == 'package-2022-12-01' +input-file: + - stable/2022-12-01/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. + +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Tag: package-2022-11-01-preview + +These settings apply only when `--tag=package-2022-11-01-preview` is specified on the command line. + +```yaml $(tag) == 'package-2022-11-01-preview' +input-file: + - preview/2022-11-01-preview/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. + +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Suppress rules that might be fixed + +These set of linting rules we expect to fixed in typespec-autorest emitter but for now suppressing. 
+Github issue filed at https://github.com/Azure/typespec-azure/issues/2762 + +```yaml +suppressions: + - code: LroExtension + - code: SchemaTypeAndFormat + - code: PathParameterSchema +``` diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/shared.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/shared.tsp new file mode 100644 index 000000000000..1b94bb705031 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/shared.tsp @@ -0,0 +1,8 @@ +@doc("Faked shared model") +model FakedSharedModel { + @doc("The tag.") + tag: string; + + @doc("The created date.") + createdAt: utcDateTime; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_CreateOrUpdateWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_CreateOrUpdateWidgetSample.json new file mode 100644 index 000000000000..dbd333fb52dc --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_CreateOrUpdateWidgetSample.json @@ -0,0 +1,37 @@ +{ + "title": "Widgets_CreateOrUpdateWidget", + "operationId": "Widgets_CreateOrUpdateWidget", + "parameters": { + "widgetName": "name1", + "api-version": "2022-12-01", + "resource": { + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "responses": { + "200": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "201": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + } + } +} diff 
--git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_DeleteWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_DeleteWidgetSample.json new file mode 100644 index 000000000000..c5de7719085d --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_DeleteWidgetSample.json @@ -0,0 +1,29 @@ +{ + "operationId": "Widgets_DeleteWidget", + "title": "Delete widget by widget name using long-running operation.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "202": { + "headers": { + "location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123/result?api-version=2022-12-01", + "operation-location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123?api-version=2022-12-01" + }, + "body": { + "id": "id1", + "status": "deleted" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_GetWidgetOperationStatusSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_GetWidgetOperationStatusSample.json new file mode 100644 index 000000000000..a840b19f26e6 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_GetWidgetOperationStatusSample.json @@ -0,0 +1,45 @@ +{ + "title": "Widgets_GetWidgetOperationStatus", + "operationId": "Widgets_GetWidgetOperationStatus", + "parameters": { + "widgetName": "name1", + "operationId": "opreation id1", + 
"api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "id": "opreation id1", + "status": "InProgress", + "error": { + "code": "Error code", + "message": "Error message", + "target": "op1", + "details": [ + { + "code": "code1", + "message": "message1", + "target": "op1", + "details": [], + "innererror": { + "code": "code1" + } + } + ], + "innererror": { + "code": "code1" + } + }, + "result": { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + }, + "widgetName": "rfazvwnfwwomiwrh" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_GetWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_GetWidgetSample.json new file mode 100644 index 000000000000..ecab18c65303 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_GetWidgetSample.json @@ -0,0 +1,25 @@ +{ + "operationId": "Widgets_GetWidget", + "title": "Get widget by widget name.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "200": { + "body": { + "name": "bingsearch", + "manufacturerId": "a-22-01" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_ListWidgetsSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_ListWidgetsSample.json new file mode 100644 index 000000000000..fa68ab418490 --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/examples/Widgets_ListWidgetsSample.json @@ -0,0 +1,27 @@ +{ + "title": "Widgets_ListWidgets", + "operationId": "Widgets_ListWidgets", + "parameters": { + "top": 8, + "skip": 15, + "maxpagesize": 27, + "api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/widget.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/widget.json new file mode 100644 index 000000000000..418c1091069f --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/stable/2022-12-01/widget.json @@ -0,0 +1,550 @@ +{ + "swagger": "2.0", + "info": { + "title": "Widget", + "version": "2022-12-01", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "AadOauth2Auth": [ + "https://azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AadOauth2Auth": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "https://azure.com/.default": "" + }, + "tokenUrl": "https://login.microsoftonline.com/common/oauth2/token" + } + }, + "tags": [], + "paths": { + "/widgets": { + "get": { + "operationId": "Widgets_ListWidgets", + "description": "List Widget resources", + "parameters": [ + { + "$ref": 
"#/parameters/Azure.Core.Foundations.ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/PagedWidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Widgets_ListWidgets": { + "$ref": "./examples/Widgets_ListWidgetsSample.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/widgets/{widgetName}": { + "get": { + "operationId": "Widgets_GetWidget", + "description": "Fetch a Widget by name.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get widget by widget name.": { + "$ref": "./examples/Widgets_GetWidgetSample.json" + } + } + }, + "patch": { + "operationId": "Widgets_CreateOrUpdateWidget", + "description": "Creates or updates a Widget asynchronously.", + "consumes": [ + "application/merge-patch+json" + ], + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "resource", + "in": "body", + "description": "The resource instance.", + "required": true, + "schema": { + "$ref": "#/definitions/WidgetSuiteCreateOrUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "201": { + "description": "The request has succeeded and a new resource has been created as a result.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Widgets_CreateOrUpdateWidget": { + "$ref": "./examples/Widgets_CreateOrUpdateWidgetSample.json" + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "operationId": "Widgets_DeleteWidget", + "description": "Delete a Widget asynchronously.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "202": { + "description": "The request has been accepted for processing, but processing has not yet completed.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + } + }, + "required": [ + "id", + "status" + ] + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Delete widget by widget name using long-running operation.": { + "$ref": "./examples/Widgets_DeleteWidgetSample.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/widgets/{widgetName}/operations/{operationId}": { + "get": { + "operationId": "Widgets_GetWidgetOperationStatus", + "description": "Gets status of a Widget operation.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "operationId", + "in": "path", + "description": "The unique ID of the operation.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + }, + "result": { + "$ref": "#/definitions/WidgetSuite", + "description": "The result of the operation." + } + }, + "required": [ + "id", + "status" + ] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Widgets_GetWidgetOperationStatus": { + "$ref": "./examples/Widgets_GetWidgetOperationStatusSample.json" + } + } + } + } + }, + "definitions": { + "Azure.Core.Foundations.Error": { + "type": "object", + "description": "The error object.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + }, + "target": { + "type": "string", + "description": "The target of the error." + }, + "details": { + "type": "array", + "description": "An array of details about specific errors that led to this reported error.", + "items": { + "$ref": "#/definitions/Azure.Core.Foundations.Error" + }, + "x-ms-identifiers": [] + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "An object containing more specific information than the current object about the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "Azure.Core.Foundations.ErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "Azure.Core.Foundations.InnerError": { + "type": "object", + "description": "An object containing more specific information about the error. As per Microsoft One API guidelines - https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md#handling-errors.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "Inner error." 
+ } + } + }, + "Azure.Core.Foundations.OperationState": { + "type": "string", + "description": "Enum describing allowed operation states.", + "enum": [ + "NotStarted", + "Running", + "Succeeded", + "Failed", + "Canceled" + ], + "x-ms-enum": { + "name": "OperationState", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "The operation has not started." + }, + { + "name": "Running", + "value": "Running", + "description": "The operation is in progress." + }, + { + "name": "Succeeded", + "value": "Succeeded", + "description": "The operation has completed successfully." + }, + { + "name": "Failed", + "value": "Failed", + "description": "The operation has failed." + }, + { + "name": "Canceled", + "value": "Canceled", + "description": "The operation has been canceled by the user." + } + ] + } + }, + "FakedSharedModel": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." + } + }, + "required": [ + "tag", + "createdAt" + ] + }, + "FakedSharedModelCreateOrUpdate": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." 
+ } + } + }, + "PagedWidgetSuite": { + "type": "object", + "description": "Paged collection of WidgetSuite items", + "properties": { + "value": { + "type": "array", + "description": "The WidgetSuite items on this page", + "items": { + "$ref": "#/definitions/WidgetSuite" + }, + "x-ms-identifiers": [] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "WidgetSuite": { + "type": "object", + "description": "A widget.", + "properties": { + "name": { + "type": "string", + "description": "The widget name.", + "readOnly": true + }, + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModel", + "description": "The faked shared model." + } + }, + "required": [ + "name", + "manufacturerId" + ] + }, + "WidgetSuiteCreateOrUpdate": { + "type": "object", + "description": "A widget.", + "properties": { + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModelCreateOrUpdate", + "description": "The faked shared model." 
+ } + } + } + }, + "parameters": { + "Azure.Core.Foundations.ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-client-name": "apiVersion" + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/tspconfig.yaml b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/tspconfig.yaml new file mode 100644 index 000000000000..2de0c6c6c6cf --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/data-plane/widget/tspconfig.yaml @@ -0,0 +1,47 @@ +parameters: + "service-dir": + default: "sdk/widget" + "dependencies": + default: "" +emit: + - "@azure-tools/typespec-autorest" +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/data-plane" +options: + "@azure-tools/typespec-autorest": + # TODO: Does anything need this set, if it's not used in output-file? 
+ azure-resource-provider-folder: "data-plane" + emit-lro-options: "none" + emitter-output-dir: "{project-root}" + output-file: "{version-status}/{version}/widgets.json" + "@azure-tools/typespec-python": + package-dir: "azure-widget" + namespace: "azure.widget" + generate-test: true + generate-sample: true + flavor: azure + "@azure-tools/typespec-csharp": + package-dir: "Azure.Widget" + clear-output-folder: true + model-namespace: false + namespace: "{package-dir}" + flavor: azure + "@azure-tools/typespec-ts": + package-dir: "widget-rest" + package-details: + name: "@azure-rest/azure-widget" + flavor: azure + "@azure-tools/typespec-java": + package-dir: "azure-widget" + namespace: com.azure.widget + flavor: azure + "@azure-tools/typespec-go": + module: "github.com/Azure/azure-sdk-for-go/{service-dir}/{package-dir}" + service-dir: "sdk/widget" + package-dir: "azmanager" + module-version: "0.0.1" + generate-fakes: true + inject-spans: true + single-client: true + slice-elements-byval: true diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/employee.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/employee.tsp new file mode 100644 index 000000000000..d77152e040ec --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/employee.tsp @@ -0,0 +1,63 @@ +import "@typespec/rest"; +import "@typespec/http"; +import "@azure-tools/typespec-azure-core"; +import "@azure-tools/typespec-azure-resource-manager"; + +using TypeSpec.Rest; +using TypeSpec.Http; +using Azure.Core; +using Azure.ResourceManager; + +namespace WidgetManagement; + +/** Employee resource */ +model Employee is TrackedResource { + ...ResourceNameParameter; +} + +/** Employee properties */ +model EmployeeProperties { + /** Age of employee */ + age?: int32; + + 
/** City of employee */ + city?: string; + + /** Profile of employee */ + @encode("base64url") + profile?: bytes; + + /** The status of the last operation. */ + @visibility(Lifecycle.Read) + provisioningState?: ProvisioningState; +} + +/** The resource provisioning state. */ +@lroStatus +union ProvisioningState { + ResourceProvisioningState, + + /** The resource is being provisioned */ + Provisioning: "Provisioning", + + /** The resource is updating */ + Updating: "Updating", + + /** The resource is being deleted */ + Deleting: "Deleting", + + /** The resource create request has been accepted */ + Accepted: "Accepted", + + string, +} + +@armResourceOperations +interface Employees { + get is ArmResourceRead; + createOrUpdate is ArmResourceCreateOrReplaceAsync; + update is ArmResourcePatchSync; + delete is ArmResourceDeleteWithoutOkAsync; + listByResourceGroup is ArmResourceListByParent; + listBySubscription is ArmListBySubscription; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_CreateOrUpdate.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..4a13a329e3b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + 
"location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Delete.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Delete.json new file mode 100644 index 000000000000..15176d86b029 --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Get.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Get.json new file mode 100644 index 000000000000..eb1917859e24 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": 
"2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_ListByResourceGroup.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..860fab85a9b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_ListBySubscription.json 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_ListBySubscription.json new file mode 100644 index 000000000000..18432d58de37 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Update.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Update.json new file mode 100644 index 000000000000..de46fc8ef2e8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Employees_Update.json @@ -0,0 +1,47 @@ 
+{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Operations_List.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Operations_List.json new file mode 100644 index 000000000000..4d74e755c020 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": "Operations_List", + "parameters": { + "api-version": "2021-11-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + 
"description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp new file mode 100644 index 000000000000..6a7f5047f36b --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp @@ -0,0 +1,35 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "@azure-tools/typespec-azure-resource-manager"; +import "./employee.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; +using Azure.Core; +using Azure.ResourceManager; + +/** Microsoft.Contoso Resource Provider management API. */ +@armProviderNamespace +@service(#{ title: "WidgetManagement" }) +@versioned(WidgetManagement.Versions) +namespace WidgetManagement; + +/** The available API versions. 
*/ +enum Versions { + /** 2021-10-01-preview version */ + @useDependency(Azure.ResourceManager.Versions.v1_0_Preview_1) + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @armCommonTypesVersion(Azure.ResourceManager.CommonTypes.Versions.v5) + v2021_10_01_preview: "2021-10-01-preview", + + /** 2021-11-01 version */ + @useDependency(Azure.ResourceManager.Versions.v1_0_Preview_1) + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @armCommonTypesVersion(Azure.ResourceManager.CommonTypes.Versions.v5) + v2021_11_01: "2021-11-01", +} + +interface Operations extends Azure.ResourceManager.Operations {} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md new file mode 100644 index 000000000000..98556255f8f4 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md @@ -0,0 +1,42 @@ +# WidgetManagement + +> see https://aka.ms/autorest +> This is the AutoRest configuration file for WidgetManagement. + +## Getting Started + +To build the SDKs for My API, simply install AutoRest via `npm` (`npm install -g autorest`) and then run: + +> `autorest readme.md` +> To see additional help and options, run: + +> `autorest --help` +> For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. + +--- + +## Configuration + +### Basic Information + +These are the global settings. + +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +``` + +### Tag: package-2021-11-01 + +These settings apply only when `--tag=package-2021-11-01` is specified on the command line. 
+ +```yaml $(tag) == 'package-2021-11-01' +input-file: + - stable/2021-11-01/widgetmanagement.json +suppressions: + - code: PathContainsResourceType + - code: PathResourceProviderMatchNamespace +``` + +--- diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/shared.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/shared.tsp new file mode 100644 index 000000000000..1b94bb705031 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/shared.tsp @@ -0,0 +1,8 @@ +@doc("Faked shared model") +model FakedSharedModel { + @doc("The tag.") + tag: string; + + @doc("The created date.") + createdAt: utcDateTime; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_CreateOrUpdate.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..4a13a329e3b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + 
"properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Delete.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Delete.json new file mode 100644 index 000000000000..15176d86b029 --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Get.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Get.json new file mode 100644 index 000000000000..eb1917859e24 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + 
"createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_ListByResourceGroup.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..860fab85a9b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git 
a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_ListBySubscription.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_ListBySubscription.json new file mode 100644 index 000000000000..18432d58de37 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Update.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Update.json new file mode 100644 index 
000000000000..de46fc8ef2e8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Employees_Update.json @@ -0,0 +1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Operations_List.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Operations_List.json new file mode 100644 index 000000000000..4d74e755c020 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/examples/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": 
"Operations_List", + "parameters": { + "api-version": "2021-11-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + "description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/widgetmanagement.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/widgetmanagement.json new file mode 100644 index 000000000000..c8a8b95d6b40 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/widgetmanagement.json @@ -0,0 +1,557 @@ +{ + "swagger": "2.0", + "info": { + "title": "WidgetManagement", + "version": "2021-11-01", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + }, + { + "name": "Employees" + } + ], + "paths": { + "/providers/WidgetManagement/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + "Operations" + 
], + "description": "List the operations for the provider", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Operations_List": { + "$ref": "./examples/Operations_List.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/WidgetManagement/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListBySubscription": { + "$ref": "./examples/Employees_ListBySubscription.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/WidgetManagement/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + 
"Employees" + ], + "description": "List Employee resources by resource group", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListByResourceGroup": { + "$ref": "./examples/Employees_ListByResourceGroup.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/WidgetManagement/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected 
error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": "./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + "Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_CreateOrUpdate": { + "$ref": "./examples/Employees_CreateOrUpdate.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Update": { + "$ref": "./examples/Employees_Update.json" + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": 
"../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "title": "Tracked Resource", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml new file mode 100644 index 000000000000..52522ba9eca6 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml @@ -0,0 +1,49 @@ +parameters: + "service-dir": + default: "sdk/widgetmanagement" +emit: + - "@azure-tools/typespec-autorest" +options: + "@azure-tools/typespec-autorest": + use-read-only-status-schema: true + emitter-output-dir: "{project-root}" + # TODO: Does anything need this set, if it's not used in output-file? Currently required by TSV. 
+ azure-resource-provider-folder: "resource-manager" + output-file: "{version-status}/{version}/widgetmanagement.json" + arm-types-dir: "{project-root}/../../../../common-types/resource-management" + "@azure-tools/typespec-csharp": + flavor: azure + package-dir: "Azure.ResourceManager.Widget" + clear-output-folder: true + model-namespace: true + namespace: "{package-dir}" + "@azure-tools/typespec-python": + package-dir: "azure-mgmt-widget" + namespace: "azure.mgmt.widget" + generate-test: true + generate-sample: true + flavor: "azure" + "@azure-tools/typespec-java": + package-dir: "azure-resourcemanager-widget" + namespace: "com.azure.resourcemanager.widget" + service-name: "Widget" # human-readable service name, whitespace allowed + flavor: azure + "@azure-tools/typespec-ts": + package-dir: "arm-widget" + flavor: azure + experimental-extensible-enums: true + package-details: + name: "@azure/arm-widget" + "@azure-tools/typespec-go": + service-dir: "sdk/resourcemanager/widget" + package-dir: "armwidget" + module: "github.com/Azure/azure-sdk-for-go/{service-dir}/{package-dir}" + fix-const-stuttering: true + flavor: "azure" + generate-samples: true + generate-fakes: true + head-as-boolean: true + inject-spans: true +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/resource-manager" diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/Microsoft.Service1/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service1/resource-manager/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/readme.md 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/client.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/client.tsp new file mode 100644 index 000000000000..be91eaf5c748 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/client.tsp @@ -0,0 +1,7 @@ +import "./main.tsp"; +import "@azure-tools/typespec-client-generator-core"; + +using Widget; +using Azure.ClientGenerator.Core; + +@@clientName(Widgets, "AzureWidgets", "csharp"); diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_CreateOrUpdateWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_CreateOrUpdateWidgetSample.json new file mode 100644 index 000000000000..dbd333fb52dc --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_CreateOrUpdateWidgetSample.json @@ -0,0 +1,37 @@ +{ + "title": "Widgets_CreateOrUpdateWidget", + "operationId": "Widgets_CreateOrUpdateWidget", + "parameters": { + "widgetName": "name1", + "api-version": "2022-12-01", + "resource": { + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "responses": { + "200": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "201": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + } + } +} diff --git 
a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_DeleteWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_DeleteWidgetSample.json new file mode 100644 index 000000000000..c5de7719085d --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_DeleteWidgetSample.json @@ -0,0 +1,29 @@ +{ + "operationId": "Widgets_DeleteWidget", + "title": "Delete widget by widget name using long-running operation.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "202": { + "headers": { + "location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123/result?api-version=2022-12-01", + "operation-location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123?api-version=2022-12-01" + }, + "body": { + "id": "id1", + "status": "deleted" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_GetWidgetOperationStatusSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_GetWidgetOperationStatusSample.json new file mode 100644 index 000000000000..a840b19f26e6 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_GetWidgetOperationStatusSample.json @@ -0,0 +1,45 @@ +{ + "title": "Widgets_GetWidgetOperationStatus", + "operationId": "Widgets_GetWidgetOperationStatus", + "parameters": { + "widgetName": "name1", + "operationId": "opreation id1", + "api-version": "2022-12-01" + }, + "responses": 
{ + "200": { + "body": { + "id": "opreation id1", + "status": "InProgress", + "error": { + "code": "Error code", + "message": "Error message", + "target": "op1", + "details": [ + { + "code": "code1", + "message": "message1", + "target": "op1", + "details": [], + "innererror": { + "code": "code1" + } + } + ], + "innererror": { + "code": "code1" + } + }, + "result": { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + }, + "widgetName": "rfazvwnfwwomiwrh" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_GetWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_GetWidgetSample.json new file mode 100644 index 000000000000..ecab18c65303 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_GetWidgetSample.json @@ -0,0 +1,25 @@ +{ + "operationId": "Widgets_GetWidget", + "title": "Get widget by widget name.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "200": { + "body": { + "name": "bingsearch", + "manufacturerId": "a-22-01" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_ListWidgetsSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_ListWidgetsSample.json new file mode 100644 index 000000000000..fa68ab418490 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/examples/2022-12-01/Widgets_ListWidgetsSample.json @@ -0,0 +1,27 @@ +{ + 
"title": "Widgets_ListWidgets", + "operationId": "Widgets_ListWidgets", + "parameters": { + "top": 8, + "skip": 15, + "maxpagesize": 27, + "api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/main.tsp new file mode 100644 index 000000000000..a30b708fdb62 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/main.tsp @@ -0,0 +1,61 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "./shared.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; +using Azure.Core; + +@useAuth(AadOauth2Auth<["https://azure.com/.default"]>) +@service(#{ title: "Widget" }) +@versioned(Widget.Versions) +namespace Widget; + +@doc("Versions info.") +enum Versions { + @doc("The 2022-12-01 version.") + @useDependency(Azure.Core.Versions.v1_0_Preview_1) + v2022_12_01: "2022-12-01", +} + +@doc("A widget.") +@resource("widgets") +model WidgetSuite { + @key("widgetName") + @doc("The widget name.") + @visibility(Lifecycle.Read) + name: string; + + @doc("The ID of the widget's manufacturer.") + manufacturerId: string; + + @doc("The faked shared model.") + sharedModel?: FakedSharedModel; +} + +interface Widgets { + @doc("Fetch a Widget by name.") + getWidget is ResourceRead; + + @doc("Gets status of a Widget operation.") + getWidgetOperationStatus is GetResourceOperationStatus; + + @doc("Creates or updates a Widget asynchronously.") + @pollingOperation(Widgets.getWidgetOperationStatus) + 
createOrUpdateWidget is StandardResourceOperations.LongRunningResourceCreateOrUpdate; + + @doc("Delete a Widget asynchronously.") + @pollingOperation(Widgets.getWidgetOperationStatus) + deleteWidget is LongRunningResourceDelete; + + @doc("List Widget resources") + listWidgets is ResourceList< + WidgetSuite, + { + parameters: StandardListQueryParameters; + } + >; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/readme.md new file mode 100644 index 000000000000..6daa7d4c93ee --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/readme.md @@ -0,0 +1,78 @@ +# Widget + +> see https://aka.ms/autorest + +This is the AutoRest configuration file for Widget. + +## Configuration + +### Basic Information + +This is a TypeSpec project so we only want to readme to default the default tag and point to the outputted swagger file. +This is used for some tools such as doc generation and swagger apiview generation it isn't used for SDK code gen as we +use the native TypeSpec code generation configured in the tspconfig.yaml file. + +```yaml +openapi-type: data-plane +tag: package-2022-12-01 +``` + +### Tag: package-2022-12-01 + +These settings apply only when `--tag=package-2022-12-01` is specified on the command line. + +```yaml $(tag) == 'package-2022-12-01' +input-file: + - stable/2022-12-01/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. 
+ +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Tag: package-2022-11-01-preview + +These settings apply only when `--tag=package-2022-11-01-preview` is specified on the command line. + +```yaml $(tag) == 'package-2022-11-01-preview' +input-file: + - preview/2022-11-01-preview/widgets.json +``` + +### Suppress non-TypeSpec SDK related linting rules + +These set of linting rules aren't applicable to the new TypeSpec SDK code generators so suppressing them here. Eventually we will +opt-out these rules from running in the linting tools for TypeSpec generated swagger files. + +```yaml +suppressions: + - code: AvoidAnonymousTypes + - code: PatchInOperationName + - code: OperationIdNounVerb + - code: RequiredReadOnlyProperties + - code: SchemaNamesConvention + - code: SchemaDescriptionOrTitle +``` + +### Suppress rules that might be fixed + +These set of linting rules we expect to fixed in typespec-autorest emitter but for now suppressing. 
+Github issue filed at https://github.com/Azure/typespec-azure/issues/2762 + +```yaml +suppressions: + - code: LroExtension + - code: SchemaTypeAndFormat + - code: PathParameterSchema +``` diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/shared.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/shared.tsp new file mode 100644 index 000000000000..1b94bb705031 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/shared.tsp @@ -0,0 +1,8 @@ +@doc("Faked shared model") +model FakedSharedModel { + @doc("The tag.") + tag: string; + + @doc("The created date.") + createdAt: utcDateTime; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_CreateOrUpdateWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_CreateOrUpdateWidgetSample.json new file mode 100644 index 000000000000..dbd333fb52dc --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_CreateOrUpdateWidgetSample.json @@ -0,0 +1,37 @@ +{ + "title": "Widgets_CreateOrUpdateWidget", + "operationId": "Widgets_CreateOrUpdateWidget", + "parameters": { + "widgetName": "name1", + "api-version": "2022-12-01", + "resource": { + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "responses": { + "200": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + }, + "201": { + "body": { + "name": "name1", + "manufacturerId": "manufacturer id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + } + } +} 
diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_DeleteWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_DeleteWidgetSample.json new file mode 100644 index 000000000000..c5de7719085d --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_DeleteWidgetSample.json @@ -0,0 +1,29 @@ +{ + "operationId": "Widgets_DeleteWidget", + "title": "Delete widget by widget name using long-running operation.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "202": { + "headers": { + "location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123/result?api-version=2022-12-01", + "operation-location": "https://contosowidgetmanager.azure.com/operations/00000000-0000-0000-0000-000000000123?api-version=2022-12-01" + }, + "body": { + "id": "id1", + "status": "deleted" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_GetWidgetOperationStatusSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_GetWidgetOperationStatusSample.json new file mode 100644 index 000000000000..a840b19f26e6 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_GetWidgetOperationStatusSample.json @@ -0,0 +1,45 @@ +{ + "title": "Widgets_GetWidgetOperationStatus", + "operationId": "Widgets_GetWidgetOperationStatus", + "parameters": { + "widgetName": "name1", + "operationId": "opreation 
id1", + "api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "id": "opreation id1", + "status": "InProgress", + "error": { + "code": "Error code", + "message": "Error message", + "target": "op1", + "details": [ + { + "code": "code1", + "message": "message1", + "target": "op1", + "details": [], + "innererror": { + "code": "code1" + } + } + ], + "innererror": { + "code": "code1" + } + }, + "result": { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + }, + "widgetName": "rfazvwnfwwomiwrh" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_GetWidgetSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_GetWidgetSample.json new file mode 100644 index 000000000000..ecab18c65303 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_GetWidgetSample.json @@ -0,0 +1,25 @@ +{ + "operationId": "Widgets_GetWidget", + "title": "Get widget by widget name.", + "parameters": { + "api-version": "2022-12-01", + "widgetName": "searchbox" + }, + "responses": { + "200": { + "body": { + "name": "bingsearch", + "manufacturerId": "a-22-01" + } + }, + "default": { + "body": { + "error": { + "code": "Error code", + "message": "Error message", + "details": [] + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_ListWidgetsSample.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_ListWidgetsSample.json new file mode 100644 index 000000000000..fa68ab418490 --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/examples/Widgets_ListWidgetsSample.json @@ -0,0 +1,27 @@ +{ + "title": "Widgets_ListWidgets", + "operationId": "Widgets_ListWidgets", + "parameters": { + "top": 8, + "skip": 15, + "maxpagesize": 27, + "api-version": "2022-12-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "bingsearch", + "manufacturerId": "manufacturer Id1", + "sharedModel": { + "tag": "tag1", + "createdAt": "2023-01-09T02:12:25.689Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/widget.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/widget.json new file mode 100644 index 000000000000..418c1091069f --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/stable/2022-12-01/widget.json @@ -0,0 +1,550 @@ +{ + "swagger": "2.0", + "info": { + "title": "Widget", + "version": "2022-12-01", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "AadOauth2Auth": [ + "https://azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AadOauth2Auth": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "https://azure.com/.default": "" + }, + "tokenUrl": "https://login.microsoftonline.com/common/oauth2/token" + } + }, + "tags": [], + "paths": { + "/widgets": { + "get": { + "operationId": "Widgets_ListWidgets", + "description": "List Widget resources", + "parameters": [ + { + "$ref": 
"#/parameters/Azure.Core.Foundations.ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/PagedWidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Widgets_ListWidgets": { + "$ref": "./examples/Widgets_ListWidgetsSample.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/widgets/{widgetName}": { + "get": { + "operationId": "Widgets_GetWidget", + "description": "Fetch a Widget by name.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get widget by widget name.": { + "$ref": "./examples/Widgets_GetWidgetSample.json" + } + } + }, + "patch": { + "operationId": "Widgets_CreateOrUpdateWidget", + "description": "Creates or updates a Widget asynchronously.", + "consumes": [ + "application/merge-patch+json" + ], + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "resource", + "in": "body", + "description": "The resource instance.", + "required": true, + "schema": { + "$ref": "#/definitions/WidgetSuiteCreateOrUpdate" + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "201": { + "description": "The request has succeeded and a new resource has been created as a result.", + "schema": { + "$ref": "#/definitions/WidgetSuite" + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Widgets_CreateOrUpdateWidget": { + "$ref": "./examples/Widgets_CreateOrUpdateWidgetSample.json" + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "operationId": "Widgets_DeleteWidget", + "description": "Delete a Widget asynchronously.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + } + ], + "responses": { + "202": { + "description": "The request has been accepted for processing, but processing has not yet completed.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + } + }, + "required": [ + "id", + "status" + ] + }, + "headers": { + "Operation-Location": { + "type": "string", + "format": "uri", + "description": "The location for monitoring the operation state." + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Delete widget by widget name using long-running operation.": { + "$ref": "./examples/Widgets_DeleteWidgetSample.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/widgets/{widgetName}/operations/{operationId}": { + "get": { + "operationId": "Widgets_GetWidgetOperationStatus", + "description": "Gets status of a Widget operation.", + "parameters": [ + { + "$ref": "#/parameters/Azure.Core.Foundations.ApiVersionParameter" + }, + { + "name": "widgetName", + "in": "path", + "description": "The widget name.", + "required": true, + "type": "string" + }, + { + "name": "operationId", + "in": "path", + "description": "The unique ID of the operation.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "object", + "description": "Provides status details for long running operations.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the operation." + }, + "status": { + "$ref": "#/definitions/Azure.Core.Foundations.OperationState", + "description": "The status of the operation" + }, + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "Error object that describes the error when status is \"Failed\"." + }, + "result": { + "$ref": "#/definitions/WidgetSuite", + "description": "The result of the operation." + } + }, + "required": [ + "id", + "status" + ] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/Azure.Core.Foundations.ErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Widgets_GetWidgetOperationStatus": { + "$ref": "./examples/Widgets_GetWidgetOperationStatusSample.json" + } + } + } + } + }, + "definitions": { + "Azure.Core.Foundations.Error": { + "type": "object", + "description": "The error object.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + }, + "target": { + "type": "string", + "description": "The target of the error." + }, + "details": { + "type": "array", + "description": "An array of details about specific errors that led to this reported error.", + "items": { + "$ref": "#/definitions/Azure.Core.Foundations.Error" + }, + "x-ms-identifiers": [] + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "An object containing more specific information than the current object about the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "Azure.Core.Foundations.ErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/Azure.Core.Foundations.Error", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "Azure.Core.Foundations.InnerError": { + "type": "object", + "description": "An object containing more specific information about the error. As per Microsoft One API guidelines - https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md#handling-errors.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "innererror": { + "$ref": "#/definitions/Azure.Core.Foundations.InnerError", + "description": "Inner error." 
+ } + } + }, + "Azure.Core.Foundations.OperationState": { + "type": "string", + "description": "Enum describing allowed operation states.", + "enum": [ + "NotStarted", + "Running", + "Succeeded", + "Failed", + "Canceled" + ], + "x-ms-enum": { + "name": "OperationState", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "The operation has not started." + }, + { + "name": "Running", + "value": "Running", + "description": "The operation is in progress." + }, + { + "name": "Succeeded", + "value": "Succeeded", + "description": "The operation has completed successfully." + }, + { + "name": "Failed", + "value": "Failed", + "description": "The operation has failed." + }, + { + "name": "Canceled", + "value": "Canceled", + "description": "The operation has been canceled by the user." + } + ] + } + }, + "FakedSharedModel": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." + } + }, + "required": [ + "tag", + "createdAt" + ] + }, + "FakedSharedModelCreateOrUpdate": { + "type": "object", + "description": "Faked shared model", + "properties": { + "tag": { + "type": "string", + "description": "The tag." + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The created date." 
+ } + } + }, + "PagedWidgetSuite": { + "type": "object", + "description": "Paged collection of WidgetSuite items", + "properties": { + "value": { + "type": "array", + "description": "The WidgetSuite items on this page", + "items": { + "$ref": "#/definitions/WidgetSuite" + }, + "x-ms-identifiers": [] + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "WidgetSuite": { + "type": "object", + "description": "A widget.", + "properties": { + "name": { + "type": "string", + "description": "The widget name.", + "readOnly": true + }, + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModel", + "description": "The faked shared model." + } + }, + "required": [ + "name", + "manufacturerId" + ] + }, + "WidgetSuiteCreateOrUpdate": { + "type": "object", + "description": "A widget.", + "properties": { + "manufacturerId": { + "type": "string", + "description": "The ID of the widget's manufacturer." + }, + "sharedModel": { + "$ref": "#/definitions/FakedSharedModelCreateOrUpdate", + "description": "The faked shared model." 
+ } + } + } + }, + "parameters": { + "Azure.Core.Foundations.ApiVersionParameter": { + "name": "api-version", + "in": "query", + "description": "The API version to use for this operation.", + "required": true, + "type": "string", + "minLength": 1, + "x-ms-parameter-location": "method", + "x-ms-client-name": "apiVersion" + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/tspconfig.yaml b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/tspconfig.yaml new file mode 100644 index 000000000000..2de0c6c6c6cf --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/data-plane/widget2/tspconfig.yaml @@ -0,0 +1,47 @@ +parameters: + "service-dir": + default: "sdk/widget" + "dependencies": + default: "" +emit: + - "@azure-tools/typespec-autorest" +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/data-plane" +options: + "@azure-tools/typespec-autorest": + # TODO: Does anything need this set, if it's not used in output-file? 
+ azure-resource-provider-folder: "data-plane" + emit-lro-options: "none" + emitter-output-dir: "{project-root}" + output-file: "{version-status}/{version}/widgets.json" + "@azure-tools/typespec-python": + package-dir: "azure-widget" + namespace: "azure.widget" + generate-test: true + generate-sample: true + flavor: azure + "@azure-tools/typespec-csharp": + package-dir: "Azure.Widget" + clear-output-folder: true + model-namespace: false + namespace: "{package-dir}" + flavor: azure + "@azure-tools/typespec-ts": + package-dir: "widget-rest" + package-details: + name: "@azure-rest/azure-widget" + flavor: azure + "@azure-tools/typespec-java": + package-dir: "azure-widget" + namespace: com.azure.widget + flavor: azure + "@azure-tools/typespec-go": + module: "github.com/Azure/azure-sdk-for-go/{service-dir}/{package-dir}" + service-dir: "sdk/widget" + package-dir: "azmanager" + module-version: "0.0.1" + generate-fakes: true + inject-spans: true + single-client: true + slice-elements-byval: true diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/employee.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/employee.tsp new file mode 100644 index 000000000000..d77152e040ec --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/employee.tsp @@ -0,0 +1,63 @@ +import "@typespec/rest"; +import "@typespec/http"; +import "@azure-tools/typespec-azure-core"; +import "@azure-tools/typespec-azure-resource-manager"; + +using TypeSpec.Rest; +using TypeSpec.Http; +using Azure.Core; +using Azure.ResourceManager; + +namespace WidgetManagement; + +/** Employee resource */ +model Employee is TrackedResource { + ...ResourceNameParameter; +} + +/** Employee properties */ +model EmployeeProperties { + /** Age of employee */ + age?: int32; + 
+ /** City of employee */ + city?: string; + + /** Profile of employee */ + @encode("base64url") + profile?: bytes; + + /** The status of the last operation. */ + @visibility(Lifecycle.Read) + provisioningState?: ProvisioningState; +} + +/** The resource provisioning state. */ +@lroStatus +union ProvisioningState { + ResourceProvisioningState, + + /** The resource is being provisioned */ + Provisioning: "Provisioning", + + /** The resource is updating */ + Updating: "Updating", + + /** The resource is being deleted */ + Deleting: "Deleting", + + /** The resource create request has been accepted */ + Accepted: "Accepted", + + string, +} + +@armResourceOperations +interface Employees { + get is ArmResourceRead; + createOrUpdate is ArmResourceCreateOrReplaceAsync; + update is ArmResourcePatchSync; + delete is ArmResourceDeleteWithoutOkAsync; + listByResourceGroup is ArmResourceListByParent; + listBySubscription is ArmListBySubscription; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_CreateOrUpdate.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..4a13a329e3b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + 
"location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Delete.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Delete.json new file mode 100644 index 000000000000..15176d86b029 --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Get.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Get.json new file mode 100644 index 000000000000..eb1917859e24 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": 
"2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_ListByResourceGroup.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..860fab85a9b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_ListBySubscription.json 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_ListBySubscription.json new file mode 100644 index 000000000000..18432d58de37 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Update.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Update.json new file mode 100644 index 000000000000..de46fc8ef2e8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Employees_Update.json @@ -0,0 
+1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Operations_List.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Operations_List.json new file mode 100644 index 000000000000..4d74e755c020 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/examples/2021-11-01/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": "Operations_List", + "parameters": { + "api-version": "2021-11-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + 
"description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/main.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/main.tsp new file mode 100644 index 000000000000..6a7f5047f36b --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/main.tsp @@ -0,0 +1,35 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "@azure-tools/typespec-azure-resource-manager"; +import "./employee.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; +using Azure.Core; +using Azure.ResourceManager; + +/** Microsoft.Contoso Resource Provider management API. */ +@armProviderNamespace +@service(#{ title: "WidgetManagement" }) +@versioned(WidgetManagement.Versions) +namespace WidgetManagement; + +/** The available API versions. 
*/ +enum Versions { + /** 2021-10-01-preview version */ + @useDependency(Azure.ResourceManager.Versions.v1_0_Preview_1) + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @armCommonTypesVersion(Azure.ResourceManager.CommonTypes.Versions.v5) + v2021_10_01_preview: "2021-10-01-preview", + + /** 2021-11-01 version */ + @useDependency(Azure.ResourceManager.Versions.v1_0_Preview_1) + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @armCommonTypesVersion(Azure.ResourceManager.CommonTypes.Versions.v5) + v2021_11_01: "2021-11-01", +} + +interface Operations extends Azure.ResourceManager.Operations {} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/readme.md new file mode 100644 index 000000000000..98556255f8f4 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/readme.md @@ -0,0 +1,42 @@ +# WidgetManagement + +> see https://aka.ms/autorest +> This is the AutoRest configuration file for WidgetManagement. + +## Getting Started + +To build the SDKs for My API, simply install AutoRest via `npm` (`npm install -g autorest`) and then run: + +> `autorest readme.md` +> To see additional help and options, run: + +> `autorest --help` +> For other options on installation see [Installing AutoRest](https://aka.ms/autorest/install) on the AutoRest github page. + +--- + +## Configuration + +### Basic Information + +These are the global settings. + +```yaml +openapi-type: arm +openapi-subtype: rpaas +tag: package-2021-11-01 +``` + +### Tag: package-2021-11-01 + +These settings apply only when `--tag=package-2021-11-01` is specified on the command line. 
+ +```yaml $(tag) == 'package-2021-11-01' +input-file: + - stable/2021-11-01/widgetmanagement.json +suppressions: + - code: PathContainsResourceType + - code: PathResourceProviderMatchNamespace +``` + +--- diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/shared.tsp b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/shared.tsp new file mode 100644 index 000000000000..1b94bb705031 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/shared.tsp @@ -0,0 +1,8 @@ +@doc("Faked shared model") +model FakedSharedModel { + @doc("The tag.") + tag: string; + + @doc("The created date.") + createdAt: utcDateTime; +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_CreateOrUpdate.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_CreateOrUpdate.json new file mode 100644 index 000000000000..4a13a329e3b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_CreateOrUpdate.json @@ -0,0 +1,76 @@ +{ + "title": "Employees_CreateOrUpdate", + "operationId": "Employees_CreateOrUpdate", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "9KF-f-8b", + "resource": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl" + } + }, + "responses": { + "200": { + "body": { 
+ "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + }, + "201": { + "headers": { + "Azure-AsyncOperation": "https://contoso.com/operationstatus" + }, + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/9KF-f-8b", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Delete.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Delete.json new file mode 100644 index 000000000000..15176d86b029 --- /dev/null +++ 
b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Delete.json @@ -0,0 +1,19 @@ +{ + "title": "Employees_Delete", + "operationId": "Employees_Delete", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "5vX--BxSu3ux48rI4O9OQ569" + }, + "responses": { + "202": { + "headers": { + "Retry-After": 30, + "location": "https://contoso.com/operationstatus" + } + }, + "204": {} + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Get.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Get.json new file mode 100644 index 000000000000..eb1917859e24 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Get.json @@ -0,0 +1,37 @@ +{ + "title": "Employees_Get", + "operationId": "Employees_Get", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "le-8MU--J3W6q8D386p3-iT3" + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/le-8MU--J3W6q8D386p3-iT3", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + 
"createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_ListByResourceGroup.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_ListByResourceGroup.json new file mode 100644 index 000000000000..860fab85a9b8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_ListByResourceGroup.json @@ -0,0 +1,41 @@ +{ + "title": "Employees_ListByResourceGroup", + "operationId": "Employees_ListByResourceGroup", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git 
a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_ListBySubscription.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_ListBySubscription.json new file mode 100644 index 000000000000..18432d58de37 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_ListBySubscription.json @@ -0,0 +1,40 @@ +{ + "title": "Employees_ListBySubscription", + "operationId": "Employees_ListBySubscription", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/rgopenapi/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + ], + "nextLink": "https://microsoft.com/a" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Update.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Update.json new file mode 100644 index 
000000000000..de46fc8ef2e8 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Employees_Update.json @@ -0,0 +1,47 @@ +{ + "title": "Employees_Update", + "operationId": "Employees_Update", + "parameters": { + "api-version": "2021-11-01", + "subscriptionId": "11809CA1-E126-4017-945E-AA795CD5C5A9", + "resourceGroupName": "rgopenapi", + "employeeName": "-XhyNJ--", + "properties": { + "tags": { + "key7952": "no" + }, + "properties": { + "age": 24, + "city": "uyfg", + "profile": "oapgijcswfkruiuuzbwco" + } + } + }, + "responses": { + "200": { + "body": { + "properties": { + "age": 30, + "city": "gydhnntudughbmxlkyzrskcdkotrxn", + "profile": "ms", + "provisioningState": "Succeeded" + }, + "tags": { + "key2913": "urperxmkkhhkp" + }, + "location": "itajgxyqozseoygnl", + "id": "/subscriptions/11809CA1-E126-4017-945E-AA795CD5C5A9/resourceGroups/contoso/providers/Microsoft.Contoso/employees/test", + "name": "xepyxhpb", + "type": "svvamxrdnnv", + "systemData": { + "createdBy": "iewyxsnriqktsvp", + "createdByType": "User", + "createdAt": "2023-05-19T00:28:48.610Z", + "lastModifiedBy": "xrchbnnuzierzpxw", + "lastModifiedByType": "User", + "lastModifiedAt": "2023-05-19T00:28:48.610Z" + } + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Operations_List.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Operations_List.json new file mode 100644 index 000000000000..4d74e755c020 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/examples/Operations_List.json @@ -0,0 +1,28 @@ +{ + "title": "Operations_List", + "operationId": 
"Operations_List", + "parameters": { + "api-version": "2021-11-01" + }, + "responses": { + "200": { + "body": { + "value": [ + { + "name": "ymeow", + "isDataAction": true, + "display": { + "provider": "qxyznq", + "resource": "bqfwkox", + "operation": "td", + "description": "yvgkhsuwartgxb" + }, + "origin": "user", + "actionType": "Internal" + } + ], + "nextLink": "https://sample.com/nextLink" + } + } + } +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/widgetmanagement.json b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/widgetmanagement.json new file mode 100644 index 000000000000..c8a8b95d6b40 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/stable/2021-11-01/widgetmanagement.json @@ -0,0 +1,557 @@ +{ + "swagger": "2.0", + "info": { + "title": "WidgetManagement", + "version": "2021-11-01", + "description": "Microsoft.Contoso Resource Provider management API.", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "host": "management.azure.com", + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "description": "Azure Active Directory OAuth2 Flow.", + "flow": "implicit", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "tags": [ + { + "name": "Operations" + }, + { + "name": "Employees" + } + ], + "paths": { + "/providers/WidgetManagement/operations": { + "get": { + "operationId": "Operations_List", + "tags": [ + "Operations" 
+ ], + "description": "List the operations for the provider", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/OperationListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Operations_List": { + "$ref": "./examples/Operations_List.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/WidgetManagement/employees": { + "get": { + "operationId": "Employees_ListBySubscription", + "tags": [ + "Employees" + ], + "description": "List Employee resources by subscription ID", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListBySubscription": { + "$ref": "./examples/Employees_ListBySubscription.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/WidgetManagement/employees": { + "get": { + "operationId": "Employees_ListByResourceGroup", + "tags": [ + 
"Employees" + ], + "description": "List Employee resources by resource group", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/EmployeeListResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_ListByResourceGroup": { + "$ref": "./examples/Employees_ListByResourceGroup.json" + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/WidgetManagement/employees/{employeeName}": { + "get": { + "operationId": "Employees_Get", + "tags": [ + "Employees" + ], + "description": "Get a Employee", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected 
error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Get": { + "$ref": "./examples/Employees_Get.json" + } + } + }, + "put": { + "operationId": "Employees_CreateOrUpdate", + "tags": [ + "Employees" + ], + "description": "Create a Employee", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "resource", + "in": "body", + "description": "Resource create parameters.", + "required": true, + "schema": { + "$ref": "#/definitions/Employee" + } + } + ], + "responses": { + "200": { + "description": "Resource 'Employee' update operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "201": { + "description": "Resource 'Employee' create operation succeeded", + "schema": { + "$ref": "#/definitions/Employee" + }, + "headers": { + "Azure-AsyncOperation": { + "type": "string", + "description": "A link to the status monitor" + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." 
+ } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_CreateOrUpdate": { + "$ref": "./examples/Employees_CreateOrUpdate.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "azure-async-operation" + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "Employees_Update", + "tags": [ + "Employees" + ], + "description": "Update a Employee", + "parameters": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + }, + { + "name": "properties", + "in": "body", + "description": "The resource properties to be updated.", + "required": true, + "schema": { + "$ref": "#/definitions/EmployeeUpdate" + } + } + ], + "responses": { + "200": { + "description": "Azure operation completed successfully.", + "schema": { + "$ref": "#/definitions/Employee" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Update": { + "$ref": "./examples/Employees_Update.json" + } + } + }, + "delete": { + "operationId": "Employees_Delete", + "tags": [ + "Employees" + ], + "description": "Delete a Employee", + "parameters": [ + { + "$ref": 
"../../../../../../common-types/resource-management/v5/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "name": "employeeName", + "in": "path", + "description": "The name of the Employee", + "required": true, + "type": "string", + "pattern": "^[a-zA-Z0-9-]{3,24}$" + } + ], + "responses": { + "202": { + "description": "Resource deletion accepted.", + "headers": { + "Location": { + "type": "string", + "description": "The Location header contains the URL where the status of the long running operation can be checked." + }, + "Retry-After": { + "type": "integer", + "format": "int32", + "description": "The Retry-After header can indicate how long the client should wait before polling the operation status." + } + } + }, + "204": { + "description": "Resource does not exist." 
+ }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "Employees_Delete": { + "$ref": "./examples/Employees_Delete.json" + } + }, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + }, + "x-ms-long-running-operation": true + } + } + }, + "definitions": { + "Azure.ResourceManager.CommonTypes.TrackedResourceUpdate": { + "type": "object", + "title": "Tracked Resource", + "description": "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'", + "properties": { + "tags": { + "type": "object", + "description": "Resource tags.", + "additionalProperties": { + "type": "string" + } + } + }, + "allOf": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/Resource" + } + ] + }, + "Employee": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." 
+ } + }, + "allOf": [ + { + "$ref": "../../../../../../common-types/resource-management/v5/types.json#/definitions/TrackedResource" + } + ] + }, + "EmployeeListResult": { + "type": "object", + "description": "The response of a Employee list operation.", + "properties": { + "value": { + "type": "array", + "description": "The Employee items on this page", + "items": { + "$ref": "#/definitions/Employee" + } + }, + "nextLink": { + "type": "string", + "format": "uri", + "description": "The link to the next page of items" + } + }, + "required": [ + "value" + ] + }, + "EmployeeProperties": { + "type": "object", + "description": "Employee properties", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "description": "Age of employee" + }, + "city": { + "type": "string", + "description": "City of employee" + }, + "profile": { + "type": "string", + "format": "base64url", + "description": "Profile of employee" + }, + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "The status of the last operation.", + "readOnly": true + } + } + }, + "EmployeeUpdate": { + "type": "object", + "description": "Employee resource", + "properties": { + "properties": { + "$ref": "#/definitions/EmployeeProperties", + "description": "The resource-specific properties for this resource." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Azure.ResourceManager.CommonTypes.TrackedResourceUpdate" + } + ] + }, + "ProvisioningState": { + "type": "string", + "description": "The resource provisioning state.", + "enum": [ + "Succeeded", + "Failed", + "Canceled", + "Provisioning", + "Updating", + "Deleting", + "Accepted" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true, + "values": [ + { + "name": "Succeeded", + "value": "Succeeded", + "description": "Resource has been created." + }, + { + "name": "Failed", + "value": "Failed", + "description": "Resource creation failed." 
+ }, + { + "name": "Canceled", + "value": "Canceled", + "description": "Resource creation was canceled." + }, + { + "name": "Provisioning", + "value": "Provisioning", + "description": "The resource is being provisioned" + }, + { + "name": "Updating", + "value": "Updating", + "description": "The resource is updating" + }, + { + "name": "Deleting", + "value": "Deleting", + "description": "The resource is being deleted" + }, + { + "name": "Accepted", + "value": "Accepted", + "description": "The resource create request has been accepted" + } + ] + }, + "readOnly": true + } + }, + "parameters": {} +} diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/tspconfig.yaml b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/tspconfig.yaml new file mode 100644 index 000000000000..52522ba9eca6 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/WidgetManagement2/tspconfig.yaml @@ -0,0 +1,49 @@ +parameters: + "service-dir": + default: "sdk/widgetmanagement" +emit: + - "@azure-tools/typespec-autorest" +options: + "@azure-tools/typespec-autorest": + use-read-only-status-schema: true + emitter-output-dir: "{project-root}" + # TODO: Does anything need this set, if it's not used in output-file? Currently required by TSV. 
+ azure-resource-provider-folder: "resource-manager" + output-file: "{version-status}/{version}/widgetmanagement.json" + arm-types-dir: "{project-root}/../../../../common-types/resource-management" + "@azure-tools/typespec-csharp": + flavor: azure + package-dir: "Azure.ResourceManager.Widget" + clear-output-folder: true + model-namespace: true + namespace: "{package-dir}" + "@azure-tools/typespec-python": + package-dir: "azure-mgmt-widget" + namespace: "azure.mgmt.widget" + generate-test: true + generate-sample: true + flavor: "azure" + "@azure-tools/typespec-java": + package-dir: "azure-resourcemanager-widget" + namespace: "com.azure.resourcemanager.widget" + service-name: "Widget" # human-readable service name, whitespace allowed + flavor: azure + "@azure-tools/typespec-ts": + package-dir: "arm-widget" + flavor: azure + experimental-extensible-enums: true + package-details: + name: "@azure/arm-widget" + "@azure-tools/typespec-go": + service-dir: "sdk/resourcemanager/widget" + package-dir: "armwidget" + module: "github.com/Azure/azure-sdk-for-go/{service-dir}/{package-dir}" + fix-const-stuttering: true + flavor: "azure" + generate-samples: true + generate-fakes: true + head-as-boolean: true + inject-spans: true +linter: + extends: + - "@azure-tools/typespec-azure-rulesets/resource-manager" diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/Microsoft.Service2/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/readme.md b/eng/tools/spec-gen-sdk-runner/test/fixtures/specification/service2/resource-manager/readme.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/eng/tools/spec-gen-sdk-runner/test/log.test.ts b/eng/tools/spec-gen-sdk-runner/test/log.test.ts new file 
mode 100644 index 000000000000..60c3fb569262 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/log.test.ts @@ -0,0 +1,98 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from "vitest"; +import { + logMessage, + LogLevel, + LogIssueType, + vsoAddAttachment, + vsoLogIssue, + setVsoVariable, +} from "../src/log.js"; + +const logSpy = vi.spyOn(console, "log").mockImplementation(() => { + // mock implementation intentionally left blank +}); + +const errorSpy = vi.spyOn(console, "error").mockImplementation(() => { + // mock implementation intentionally left blank +}); + +const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => { + // mock implementation intentionally left blank +}); + +describe("logMessage", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + test("logs a normal message", () => { + logMessage("Hello"); + expect(logSpy).toHaveBeenCalledWith("Hello"); + }); + + test("logs a group message", () => { + logMessage("Group", LogLevel.Group); + expect(logSpy).toHaveBeenCalledWith("##[group]Group"); + }); + + test("logs an endgroup message", () => { + logMessage("End", LogLevel.EndGroup); + expect(logSpy).toHaveBeenCalledWith("##[endgroup]"); + }); + + test("logs a debug message", () => { + logMessage("Debug", LogLevel.Debug); + expect(logSpy).toHaveBeenCalledWith("[debug]Debug"); + }); + + test("logs a warning", () => { + logMessage("Warning", LogLevel.Warn); + expect(warnSpy).toHaveBeenCalledWith("Warning"); + }); + + test("logs an error", () => { + logMessage("Error", LogLevel.Error); + expect(errorSpy).toHaveBeenCalledWith("Error"); + }); +}); + +describe("vso helpers", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + test("vsoAddAttachment should log correct format", () => { + vsoAddAttachment("MyReport", "/path/to/report.md"); + expect(logSpy).toHaveBeenCalledWith( + "##vso[task.addattachment 
type=Distributedtask.Core.Summary;name=MyReport;]/path/to/report.md", + ); + }); + + test("vsoLogIssue should default to error type", () => { + vsoLogIssue("Something went wrong"); + expect(logSpy).toHaveBeenCalledWith("##vso[task.logissue type=error]Something went wrong"); + }); + + test("vsoLogIssue should support custom type", () => { + vsoLogIssue("Just a warning", LogIssueType.Warning); + expect(logSpy).toHaveBeenCalledWith("##vso[task.logissue type=warning]Just a warning"); + }); + + test("setVsoVariable should set regular variable", () => { + setVsoVariable("buildNumber", "1234"); + expect(logSpy).toHaveBeenCalledWith("##vso[task.setVariable variable=buildNumber]1234"); + }); + + test("setVsoVariable should set output variable", () => { + setVsoVariable("result", "OK", true); + expect(logSpy).toHaveBeenCalledWith("##vso[task.setVariable variable=result;isoutput=true]OK"); + }); +}); diff --git a/eng/tools/spec-gen-sdk-runner/test/spec-helpers.test.ts b/eng/tools/spec-gen-sdk-runner/test/spec-helpers.test.ts new file mode 100644 index 000000000000..144aed93b518 --- /dev/null +++ b/eng/tools/spec-gen-sdk-runner/test/spec-helpers.test.ts @@ -0,0 +1,737 @@ +import { describe, test, expect, vi, beforeEach } from "vitest"; +import { + detectChangedSpecConfigFiles, + groupSpecConfigPaths, + processTypeSpecProjectsV2FolderStructure, +} from "../src/spec-helpers.js"; +import { SpecGenSdkCmdInput } from "../src/types.js"; +import { fileURLToPath } from "node:url"; +import path from "node:path"; +import { + type ChangedSpecs, + type SpecConfigs, + normalizePath, + getChangedFiles, +} from "../src/utils.js"; + +vi.mock("../src/utils.js", async () => { + const actual = await vi.importActual("../src/utils.js"); + + return { + ...actual, + getChangedFiles: vi.fn(), + }; +}); + +function normalizeResultItem(item: ChangedSpecs): ChangedSpecs { + return { + specs: item.specs.map((path) => normalizePath(path)), + ...(item.readmeMd ? 
{ readmeMd: normalizePath(item.readmeMd) } : {}), + ...(item.typespecProject ? { typespecProject: normalizePath(item.typespecProject) } : {}), + }; +} + +/** + * Normalizes all path properties in an array of SpecConfigs objects + * @param configsArray - Array of SpecConfigs objects to normalize + * @returns A new array with normalized path properties + */ +function normalizeSpecConfigsArray(configsArray: SpecConfigs[]): SpecConfigs[] { + return configsArray.map((config) => { + return { + ...(config.readmePath ? { readmePath: normalizePath(config.readmePath) } : {}), + ...(config.tspconfigPath ? { tspconfigPath: normalizePath(config.tspconfigPath) } : {}), + }; + }); +} + +describe("detectChangedSpecConfigFiles", () => { + const currentFilePath = fileURLToPath(import.meta.url); + const repoRoot = path.resolve(path.dirname(currentFilePath), "fixtures/"); + + const mockCommandInput: SpecGenSdkCmdInput = { + localSpecRepoPath: repoRoot, + workingFolder: "", + runMode: "", + localSdkRepoPath: "", + sdkRepoName: "", + sdkLanguage: "", + specCommitSha: "", + specRepoHttpsUrl: "", + }; + + beforeEach(() => { + vi.clearAllMocks(); + }); + + test("case with empty change files", () => { + vi.mocked(getChangedFiles).mockReturnValue([]); + const result = detectChangedSpecConfigFiles(mockCommandInput); + expect(result).toEqual([]); + }); + + test("case with changed files but none under specification folder", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "eng/tools/script1.js", + "documentation/README.md", + "profile/2020-09-01-hybrid.json", + ]); + const result = detectChangedSpecConfigFiles(mockCommandInput); + expect(result).toEqual([]); + }); + + test("case with changed files only under scenarios folder", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/storage/scenarios/test1.json", + "specification/compute/scenarios/test2.json", + ]); + const result = detectChangedSpecConfigFiles(mockCommandInput); + expect(result).toEqual([]); + }); + + 
test("case with readme files", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(2); + expect(normalizeResultItem(result[0])).toEqual({ + specs: [ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + ], + readmeMd: "specification/contosowidgetmanager/resource-manager/readme.md", + }); + expect(normalizeResultItem(result[1])).toEqual({ + specs: [ + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json", + ], + readmeMd: "specification/contosowidgetmanager/data-plane/readme.md", + }); + }); + + test("case with tsp files", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/contosowidgetmanager/Contoso.Management/main.tsp", + "specification/contosowidgetmanager/Contoso.Management/client.tsp", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(2); + expect(normalizeResultItem(result[0])).toEqual({ + specs: [ + "specification/contosowidgetmanager/Contoso.Management/main.tsp", + "specification/contosowidgetmanager/Contoso.Management/client.tsp", + ], + typespecProject: "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + }); + expect(normalizeResultItem(result[1])).toEqual({ + specs: ["specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml"], + typespecProject: "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + }); + }); + + test("case with shared files", () => { + 
vi.mocked(getChangedFiles).mockReturnValue([ + "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp", + "specification/contosowidgetmanager/Contoso.Management/client.tsp", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(2); + expect(normalizeResultItem(result[0])).toEqual({ + specs: ["specification/contosowidgetmanager/Contoso.Management/client.tsp"], + typespecProject: "specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + }); + expect(normalizeResultItem(result[1])).toEqual({ + specs: [ + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp", + ], + typespecProject: "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + }); + }); + + test("case with readme, tsp, shared files", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json", + "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp", + "specification/contosowidgetmanager/Contoso.Management/client.tsp", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(2); + expect(normalizeResultItem(result[0])).toEqual({ + specs: [ + "specification/contosowidgetmanager/resource-manager/Microsoft.Contoso/preview/2021-10-01-preview/examples/Employees_Get.json", + "specification/contosowidgetmanager/Contoso.Management/client.tsp", + ], + readmeMd: "specification/contosowidgetmanager/resource-manager/readme.md", + typespecProject: 
"specification/contosowidgetmanager/Contoso.Management/tspconfig.yaml", + }); + expect(normalizeResultItem(result[1])).toEqual({ + specs: [ + "specification/contosowidgetmanager/data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/widgets.json", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp", + ], + readmeMd: "specification/contosowidgetmanager/data-plane/readme.md", + typespecProject: "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + }); + }); + + test("case with V2 folder structure - resource-manager", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + "specification/service1/resource-manager/readme.md", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(1); + const normalizedResult = normalizeResultItem(result[0]); + + // In V2 structure, the TypeSpec project should be correctly associated with the readme + expect(normalizedResult).toEqual({ + specs: [ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + ], + readmeMd: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md", + typespecProject: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + }); + }); + + test("case with V2 folder structure - data-plane", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/service1/data-plane/widget/tspconfig.yaml", + "specification/service1/data-plane/widget/main.tsp", + "specification/service1/data-plane/readme.md", + ]); + + const result = 
detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(1); + const normalizedResult = normalizeResultItem(result[0]); + + // In V2 structure, the TypeSpec project should be correctly associated with the readme + expect(normalizedResult).toEqual({ + specs: [ + "specification/service1/data-plane/widget/tspconfig.yaml", + "specification/service1/data-plane/widget/main.tsp", + ], + readmeMd: "specification/service1/data-plane/widget/readme.md", + typespecProject: "specification/service1/data-plane/widget/tspconfig.yaml", + }); + }); + + test("case with V2 folder structure - nested subfolders", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/create.json", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/servicecontrol.json", + "specification/service1/resource-manager/Microsoft.Service1/readme.md", + "specification/service1/resource-manager/readme.md", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(1); + const normalizedResult = normalizeResultItem(result[0]); + + // The deepest TypeSpec project should be used, and both readme files should be cleaned up + expect(normalizedResult).toEqual({ + specs: [ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/examples/2021-11-01/create.json", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/stable/2021-11-01/servicecontrol.json", + ], + readmeMd: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md", + typespecProject: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + }); + }); + + test("case with V2 folder structure mixed with old structure", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + // V2 folder structure + 
"specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + "specification/service1/resource-manager/readme.md", + // Old folder structure + "specification/contosowidgetmanager/Contoso.WidgetManager/client.tsp", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + "specification/contosowidgetmanager/data-plane/readme.md", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(2); + + // First result should be for the V2 structure + const normalizedV2Result = normalizeResultItem(result[0]); + expect(normalizedV2Result).toEqual({ + specs: [ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + ], + readmeMd: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md", + typespecProject: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + }); + + // Second result should be for the old structure + const normalizedOldResult = normalizeResultItem(result[1]); + expect(normalizedOldResult).toEqual({ + specs: [ + "specification/contosowidgetmanager/data-plane/readme.md", + "specification/contosowidgetmanager/Contoso.WidgetManager/client.tsp", + "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + ], + readmeMd: "specification/contosowidgetmanager/data-plane/readme.md", + typespecProject: "specification/contosowidgetmanager/Contoso.WidgetManager/tspconfig.yaml", + }); + }); + + test("case with multiple V2 folder structures", () => { + vi.mocked(getChangedFiles).mockReturnValue([ + // First service + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + 
"specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + "specification/service1/resource-manager/readme.md", + // Second service + "specification/service2/data-plane/widget2/tspconfig.yaml", + "specification/service2/data-plane/widget2/main.tsp", + "specification/service2/data-plane/readme.md", + ]); + + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(2); + + // Results for both services should be properly processed with V2 structure + const service1Result = result.find((r) => r.typespecProject?.includes("service1")); + const service2Result = result.find((r) => r.typespecProject?.includes("service2")); + + expect(normalizeResultItem(service1Result!)).toEqual({ + specs: [ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + ], + readmeMd: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md", + typespecProject: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + }); + + expect(normalizeResultItem(service2Result!)).toEqual({ + specs: [ + "specification/service2/data-plane/widget2/tspconfig.yaml", + "specification/service2/data-plane/widget2/main.tsp", + ], + readmeMd: "specification/service2/data-plane/widget2/readme.md", + typespecProject: "specification/service2/data-plane/widget2/tspconfig.yaml", + }); + }); + + test("case with V2 folder structure - cross-platform path separators", () => { + // Mock getChangedFiles to return paths with both forward and backslashes + vi.mocked(getChangedFiles).mockReturnValue([ + String.raw`specification\service1\resource-manager\Microsoft.Service1\WidgetManagement\tspconfig.yaml`, + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + String.raw`specification\service1\resource-manager\readme.md`, + ]); 
+ + const result = detectChangedSpecConfigFiles(mockCommandInput); + + expect(result).toHaveLength(1); + const normalizedResult = normalizeResultItem(result[0]); + + // The function should handle mixed path separators correctly + expect(normalizedResult).toEqual({ + specs: [ + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/main.tsp", + ], + readmeMd: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/readme.md", + typespecProject: + "specification/service1/resource-manager/Microsoft.Service1/WidgetManagement/tspconfig.yaml", + }); + }); +}); + +describe("groupSpecConfigPaths", () => { + test("should group TypeSpec configs and readme files by service", () => { + const tspconfigs = [ + "specification/storage/Storage.Management/tspconfig.yaml", + "specification/storage/StorageClient/tspconfig.yaml", + "specification/compute/Compute.Management/tspconfig.yaml", + ]; + + const readmes = [ + "specification/storage/resource-manager/readme.md", + "specification/compute/resource-manager/readme.md", + ]; + + const result = normalizeSpecConfigsArray(groupSpecConfigPaths(tspconfigs, readmes)); + + expect(result).toHaveLength(3); // 2 from storage, 1 from compute + + // Verify the results contain both readme and tspconfig paths as expected + const storageManagementResult = result.find( + (r) => + r.tspconfigPath === "specification/storage/Storage.Management/tspconfig.yaml" && + r.readmePath === "specification/storage/resource-manager/readme.md", + ); + expect(storageManagementResult).toBeDefined(); + + const storageClientResult = result.find( + (r) => r.tspconfigPath === "specification/storage/StorageClient/tspconfig.yaml", + ); + expect(storageClientResult).toBeDefined(); + + const computeManagementResult = result.find( + (r) => + r.tspconfigPath === "specification/compute/Compute.Management/tspconfig.yaml" && + r.readmePath 
=== "specification/compute/resource-manager/readme.md", + ); + expect(computeManagementResult).toBeDefined(); + }); + + test("should handle empty inputs", () => { + const result = groupSpecConfigPaths([], []); + expect(result).toHaveLength(0); + }); + + test("should handle undefined inputs", () => { + const result = groupSpecConfigPaths(undefined, undefined); + expect(result).toHaveLength(0); + + const resultUndefinedReadmes = groupSpecConfigPaths([], undefined); + expect(resultUndefinedReadmes).toHaveLength(0); + + const resultUndefinedConfigs = groupSpecConfigPaths(undefined, []); + expect(resultUndefinedConfigs).toHaveLength(0); + }); + + test("should handle only TypeSpec configs", () => { + const tspconfigs = [ + "specification/storage/Storage.Management/tspconfig.yaml", + "specification/storage/StorageClient/tspconfig.yaml", + ]; + + const result = groupSpecConfigPaths(tspconfigs, []); + + expect(result).toHaveLength(2); + for (const item of result) { + expect(item.tspconfigPath).toBeDefined(); + expect(item.readmePath).toBeUndefined(); + } + }); + + test("should handle only readme files", () => { + const readmes = [ + "specification/storage/resource-manager/readme.md", + "specification/compute/resource-manager/readme.md", + ]; + + const result = groupSpecConfigPaths([], readmes); + + expect(result).toHaveLength(2); + for (const item of result) { + expect(item.readmePath).toBeDefined(); + expect(item.tspconfigPath).toBeUndefined(); + } + }); + + test("should skip unmatched readme files when skipUnmatchedReadme is true", () => { + const tspconfigs = ["specification/storage/Storage.Management/tspconfig.yaml"]; + + const readmes = [ + "specification/storage/resource-manager/readme.md", + "specification/compute/resource-manager/readme.md", // This will be skipped + ]; + + const result = normalizeSpecConfigsArray(groupSpecConfigPaths(tspconfigs, readmes, true)); + + expect(result).toHaveLength(1); // Only one matched pair, unmatched readme is skipped + + const 
storageManagementResult = result.find( + (r) => + r.tspconfigPath === "specification/storage/Storage.Management/tspconfig.yaml" && + r.readmePath === "specification/storage/resource-manager/readme.md", + ); + expect(storageManagementResult).toBeDefined(); + + // Ensure the unmatched compute readme was skipped + const computeResult = result.find( + (r) => r.readmePath === "specification/compute/resource-manager/readme.md", + ); + expect(computeResult).toBeUndefined(); + }); + + test("should include unmatched readme files by default when skipUnmatchedReadme is false", () => { + const tspconfigs = ["specification/storage/Storage.Management/tspconfig.yaml"]; + + const readmes = [ + "specification/storage/resource-manager/readme.md", + "specification/compute/resource-manager/readme.md", // This will be included + ]; + + const result = normalizeSpecConfigsArray(groupSpecConfigPaths(tspconfigs, readmes, false)); + + expect(result).toHaveLength(2); // One matched pair and one unmatched readme + + // Ensure the unmatched compute readme was included + const computeResult = result.find( + (r) => r.readmePath === "specification/compute/resource-manager/readme.md", + ); + expect(computeResult).toBeDefined(); + expect(computeResult?.tspconfigPath).toBeUndefined(); + }); + + test("should handle data plane and mgmt path together", () => { + const tspconfigs = [ + "specification/storage/Storage.Management/tspconfig.yaml", + "specification/storage/StorageClient/tspconfig.yaml", + "specification/compute/Compute.Management/tspconfig.yaml", + ]; + + const readmes = [ + "specification/storage/resource-manager/readme.md", + "specification/storage/data-plane/readme.md", + "specification/compute/data-plane/readme.md", + ]; + + const result = normalizeSpecConfigsArray(groupSpecConfigPaths(tspconfigs, readmes)); + expect(result).toHaveLength(4); + + // Configs should be correctly matched + const storageMgmtResult = result.find( + (r) => + r.tspconfigPath === 
"specification/storage/Storage.Management/tspconfig.yaml" && + r.readmePath === "specification/storage/resource-manager/readme.md", + ); + expect(storageMgmtResult).toBeDefined(); + + const storageDpResult = result.find( + (r) => + r.tspconfigPath === "specification/storage/StorageClient/tspconfig.yaml" && + r.readmePath === "specification/storage/data-plane/readme.md", + ); + expect(storageDpResult).toBeDefined(); + + const computeMgmtResult = result.find( + (r) => r.tspconfigPath === "specification/compute/Compute.Management/tspconfig.yaml", + ); + expect(computeMgmtResult).toBeDefined(); + + const computeDpResult = result.find( + (r) => r.readmePath === "specification/compute/data-plane/readme.md", + ); + expect(computeDpResult).toBeDefined(); + }); + + test("should handle data plane and mgmt path together with skipUnmatchedReadme", () => { + const tspconfigs = [ + "specification/storage/Storage.Management/tspconfig.yaml", + "specification/storage/StorageClient/tspconfig.yaml", + ]; + + const readmes = [ + "specification/storage/resource-manager/readme.md", + "specification/storage/data-plane/readme.md", + "specification/compute/data-plane/readme.md", // This will be skipped when skipUnmatchedReadme is true + ]; + + const result = normalizeSpecConfigsArray(groupSpecConfigPaths(tspconfigs, readmes, true)); + expect(result).toHaveLength(2); // Only matched pairs from storage + + // Configs should be correctly matched for storage + const storageMgmtResult = result.find( + (r) => + r.tspconfigPath === "specification/storage/Storage.Management/tspconfig.yaml" && + r.readmePath === "specification/storage/resource-manager/readme.md", + ); + expect(storageMgmtResult).toBeDefined(); + + const storageDpResult = result.find( + (r) => + r.tspconfigPath === "specification/storage/StorageClient/tspconfig.yaml" && + r.readmePath === "specification/storage/data-plane/readme.md", + ); + expect(storageDpResult).toBeDefined(); + + // The compute readme should be skipped + const 
computeDpResult = result.find( + (r) => r.readmePath === "specification/compute/data-plane/readme.md", + ); + expect(computeDpResult).toBeUndefined(); + }); +}); + +describe("processTypeSpecProjectsV2FolderStructure", () => { + test("should process resource-manager structure and return ChangedSpecs with correct paths", () => { + // Setup test data + const readmeMDResult = { + "specification/service/resource-manager": [ + "specification/service/resource-manager/readme.md", + ], + "specification/service/resource-manager/Microsoft.Service": [ + "specification/service/resource-manager/Microsoft.Service/readme.md", + ], + }; + + const typespecProjectResult = { + "specification/service/resource-manager/Microsoft.Service": [ + "specification/service/resource-manager/Microsoft.Service/tspconfig.yaml", + "specification/service/resource-manager/Microsoft.Service/main.tsp", + ], + }; + + // Run the function + const result = processTypeSpecProjectsV2FolderStructure(readmeMDResult, typespecProjectResult); + + // Normalize results for comparison + const normalizedResults = result.map((item) => normalizeResultItem(item)); + + // Verify results + expect(normalizedResults).toHaveLength(1); + + // Check structure of the returned ChangedSpecs object + const spec = normalizedResults[0]; + expect(spec.typespecProject).toBe( + normalizePath("specification/service/resource-manager/Microsoft.Service/tspconfig.yaml"), + ); + expect(spec.readmeMd).toBe( + normalizePath("specification/service/resource-manager/Microsoft.Service/readme.md"), + ); + expect(spec.specs).toContain( + normalizePath("specification/service/resource-manager/Microsoft.Service/tspconfig.yaml"), + ); + expect(spec.specs).toContain( + normalizePath("specification/service/resource-manager/Microsoft.Service/main.tsp"), + ); + expect(spec.specs).toContain( + normalizePath("specification/service/resource-manager/Microsoft.Service/readme.md"), + ); + + // Verify the readmeMDResult was modified as expected + 
expect(readmeMDResult).not.toHaveProperty("specification/service/resource-manager"); + expect(readmeMDResult).not.toHaveProperty( + "specification/service/resource-manager/Microsoft.Service", + ); + + // Verify the typespecProjectResult was modified as expected + expect(typespecProjectResult).not.toHaveProperty( + "specification/service/resource-manager/Microsoft.Service", + ); + }); + + test("should process data-plane structure and clean related readme entries", () => { + // Setup test data + const readmeMDResult = { + "specification/service/data-plane": ["specification/service/data-plane/readme.md"], + "specification/otherservice/data-plane": ["specification/otherservice/data-plane/readme.md"], + }; + + const typespecProjectResult = { + "specification/service/data-plane/client": [ + "specification/service/data-plane/client/tspconfig.yaml", + "specification/service/data-plane/client/main.tsp", + ], + }; + + // Run the function + const result = processTypeSpecProjectsV2FolderStructure(readmeMDResult, typespecProjectResult); + + // Normalize results for comparison + const normalizedResults = result.map((item) => normalizeResultItem(item)); + + // Verify results + expect(normalizedResults).toHaveLength(1); + + // Check that the data-plane structure was processed correctly + const spec = normalizedResults[0]; + expect(spec.typespecProject).toBe( + normalizePath("specification/service/data-plane/client/tspconfig.yaml"), + ); + expect(spec.readmeMd).toBeUndefined(); // No direct readme in the client folder + expect(spec.specs).toContain( + normalizePath("specification/service/data-plane/client/tspconfig.yaml"), + ); + expect(spec.specs).toContain(normalizePath("specification/service/data-plane/client/main.tsp")); + + // Verify related readme was removed + expect(readmeMDResult).not.toHaveProperty("specification/service/data-plane"); + + // Verify unrelated readme remains + expect(readmeMDResult).toHaveProperty("specification/otherservice/data-plane"); + + // Verify the 
typespecProjectResult was cleaned + expect(typespecProjectResult).not.toHaveProperty("specification/service/data-plane/client"); + }); + + test("should handle multiple levels in the folder structure", () => { + // Setup test data with nested structure + const readmeMDResult = { + "specification/service/resource-manager": [ + "specification/service/resource-manager/readme.md", + ], + "specification/service/resource-manager/Microsoft.Service": [ + "specification/service/resource-manager/Microsoft.Service/readme.md", + ], + "specification/service/resource-manager/Microsoft.Service/nested/subfolder": [ + "specification/service/resource-manager/Microsoft.Service/nested/subfolder/readme.md", + ], + }; + + const typespecProjectResult = { + "specification/service/resource-manager/Microsoft.Service/nested/subfolder": [ + "specification/service/resource-manager/Microsoft.Service/nested/subfolder/tspconfig.yaml", + "specification/service/resource-manager/Microsoft.Service/nested/subfolder/main.tsp", + ], + }; + + // Run the function + const result = processTypeSpecProjectsV2FolderStructure(readmeMDResult, typespecProjectResult); + + // Normalize results for comparison + const normalizedResults = result.map((item) => normalizeResultItem(item)); + + // Verify results + expect(normalizedResults).toHaveLength(1); + + // Check that the deeply nested structure was processed correctly + const spec = normalizedResults[0]; + expect(spec.readmeMd).toBe( + normalizePath( + "specification/service/resource-manager/Microsoft.Service/nested/subfolder/readme.md", + ), + ); + + // Verify parent readme entries were removed + expect(readmeMDResult).not.toHaveProperty("specification/service/resource-manager"); + expect(readmeMDResult).not.toHaveProperty( + "specification/service/resource-manager/Microsoft.Service", + ); + }); + + test("should handle empty inputs", () => { + const result = processTypeSpecProjectsV2FolderStructure({}, {}); + expect(result).toHaveLength(0); + }); +}); diff --git 
a/eng/tools/spec-gen-sdk-runner/test/utils/findParentWithFile.test.ts b/eng/tools/spec-gen-sdk-runner/test/utils/findParentWithFile.test.ts index f2d773a07065..d1a00509a538 100644 --- a/eng/tools/spec-gen-sdk-runner/test/utils/findParentWithFile.test.ts +++ b/eng/tools/spec-gen-sdk-runner/test/utils/findParentWithFile.test.ts @@ -2,12 +2,12 @@ import { describe, test, expect } from "vitest"; import { findParentWithFile } from "../../src/utils.js"; import { fileURLToPath } from "node:url"; import path from "node:path"; -import { typespecProjectRegex } from "../../src/change-files.js"; +import { typespecProjectRegex } from "../../src/spec-helpers.js"; describe("findParentWithFile", () => { // Get the absolute path to the repo root const currentFilePath = fileURLToPath(import.meta.url); - const repoRoot = path.resolve(path.dirname(currentFilePath), "../../../../../"); + const repoRoot = path.resolve(path.dirname(currentFilePath), "../fixtures/"); test("finds file in current directory", () => { const result = findParentWithFile( @@ -27,6 +27,11 @@ describe("findParentWithFile", () => { expect(result).toBe("specification/contosowidgetmanager/Contoso.WidgetManager"); }); + test("handles single segment path", () => { + const result = findParentWithFile(".", typespecProjectRegex, repoRoot); + expect(result).toBeUndefined(); + }); + test("stops at specified boundary", () => { const result = findParentWithFile( "specification/contosowidgetmanager/Contoso.WidgetManager", diff --git a/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedParentFolders.test.ts b/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedParentFolders.test.ts index 4abb37ea812d..7a87eb8b3196 100644 --- a/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedParentFolders.test.ts +++ b/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedParentFolders.test.ts @@ -2,12 +2,12 @@ import { describe, test, expect } from "vitest"; import { searchRelatedParentFolders } from "../../src/utils.js"; import { 
fileURLToPath } from "node:url"; import path from "node:path"; -import { readmeMdRegex, typespecProjectRegex } from "../../src/change-files.js"; +import { readmeMdRegex, typespecProjectRegex } from "../../src/spec-helpers.js"; describe("searchRelatedParentFolders", () => { // Get the absolute path to the repo root const currentFilePath = fileURLToPath(import.meta.url); - const repoRoot = path.resolve(path.dirname(currentFilePath), "../../../../../"); + const repoRoot = path.resolve(path.dirname(currentFilePath), "../fixtures/"); test("finds readme files for multiple paths", () => { const files = [ diff --git a/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedTypeSpecProjectBySharedLibrary.test.ts b/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedTypeSpecProjectBySharedLibrary.test.ts index 321937a11ff6..227bb3521592 100644 --- a/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedTypeSpecProjectBySharedLibrary.test.ts +++ b/eng/tools/spec-gen-sdk-runner/test/utils/searchRelatedTypeSpecProjectBySharedLibrary.test.ts @@ -6,23 +6,21 @@ import path from "node:path"; describe("searchRelatedTypeSpecProjectBySharedLibrary", () => { // Get the absolute path to the repo root const currentFilePath = fileURLToPath(import.meta.url); - const repoRoot = path.resolve(path.dirname(currentFilePath), "../../../../../"); + const repoRoot = path.resolve(path.dirname(currentFilePath), "../fixtures/"); test("finds related TypeSpec projects for shared libraries", () => { const sharedLibraries = [ - path.normalize("specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp"), + "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp", ]; const result = searchRelatedTypeSpecProjectBySharedLibrary(sharedLibraries, { searchFileRegex: /^tspconfig\.yaml$/, specRepoFolder: repoRoot, }); - const expectedPath = path.normalize("specification/contosowidgetmanager/Contoso.WidgetManager"); + const expectedPath = 
"specification/contosowidgetmanager/Contoso.WidgetManager"; expect(Object.keys(result)).toHaveLength(1); expect(result[expectedPath]).toBeDefined(); - expect(result[expectedPath]).toContain( - sharedLibraries[0], - ); + expect(result[expectedPath]).toContain(sharedLibraries[0]); }); test("handles empty shared libraries array", () => { diff --git a/eng/tools/spec-gen-sdk-runner/test/utils/searchSharedLibrary.test.ts b/eng/tools/spec-gen-sdk-runner/test/utils/searchSharedLibrary.test.ts index 9eb1770d9036..5cd5cd908fa4 100644 --- a/eng/tools/spec-gen-sdk-runner/test/utils/searchSharedLibrary.test.ts +++ b/eng/tools/spec-gen-sdk-runner/test/utils/searchSharedLibrary.test.ts @@ -2,12 +2,12 @@ import { describe, test, expect } from "vitest"; import { searchSharedLibrary } from "../../src/utils.js"; import { fileURLToPath } from "node:url"; import path from "node:path"; -import { typespecProjectSharedLibraryRegex } from "../../src/change-files.js"; +import { typespecProjectSharedLibraryRegex } from "../../src/spec-helpers.js"; describe("searchSharedLibrary", () => { // Get the absolute path to the repo root const currentFilePath = fileURLToPath(import.meta.url); - const repoRoot = path.resolve(path.dirname(currentFilePath), "../../../../../"); + const repoRoot = path.resolve(path.dirname(currentFilePath), "../fixtures/"); test("identifies shared library files", () => { const files = [ diff --git a/eng/tools/spec-gen-sdk-runner/test/utils/utils.test.ts b/eng/tools/spec-gen-sdk-runner/test/utils/utils.test.ts index e4ece7d9ea9f..4dec6f2a9ded 100644 --- a/eng/tools/spec-gen-sdk-runner/test/utils/utils.test.ts +++ b/eng/tools/spec-gen-sdk-runner/test/utils/utils.test.ts @@ -1,15 +1,19 @@ -import { describe, test, expect } from "vitest"; +import { describe, test, expect, beforeEach, vi } from "vitest"; import { findFilesRecursive, findReadmeFiles, getRelativePathFromSpecification, + getArgumentValue, + mapToObject, + objectToMap, + normalizePath, } from "../../src/utils.js"; 
import { fileURLToPath } from "node:url"; import path from "node:path"; // Get the absolute path to the repo root const currentFilePath = fileURLToPath(import.meta.url); -const repoRoot = path.resolve(path.dirname(currentFilePath), "../../../../../"); +const repoRoot = path.resolve(path.dirname(currentFilePath), "../fixtures/"); describe("Utils", () => { describe("findFilesRecursive", () => { @@ -63,6 +67,14 @@ describe("Utils", () => { }); }); + describe("getArgumentValue", () => { + test("return the argument value", () => { + const args = ["--batch-type", "all-specs", "--pr-number", "9527"]; + const result = getArgumentValue(args, "--batch-type", ""); + expect(result).toBe("all-specs"); + }); + }); + describe("getRelativePathFromSpecification", () => { test("extracts path from specification folder", () => { const result = getRelativePathFromSpecification( @@ -89,4 +101,63 @@ describe("Utils", () => { expect(result).toBe(""); }); }); + + describe("mapToObject", () => { + test("converts Map to Object correctly", () => { + const map = new Map([ + ["key1", "value1"], + ["key2", "value2"], + ]); + const result = mapToObject(map); + expect(result).toEqual({ key1: "value1", key2: "value2" }); + }); + + test("handles empty Map", () => { + const map = new Map(); + const result = mapToObject(map); + expect(result).toEqual({}); + }); + }); + + describe("objectToMap", () => { + test("converts Object to Map correctly", () => { + const obj = { key1: "value1", key2: "value2" }; + const result = objectToMap(obj); + expect(result).toEqual( + new Map([ + ["key1", "value1"], + ["key2", "value2"], + ]), + ); + }); + + test("handles empty Object", () => { + const obj = {}; + const result = objectToMap(obj); + expect(result).toEqual(new Map()); + }); + }); + + describe("normalizePath", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + test("normalizePath in Windows", () => { + vi.spyOn(process, "platform", "get").mockReturnValue("win32"); + /* eslint-disable 
unicorn/prefer-string-raw */ + const path = "specification\\contosowidgetmanager\\Contoso.WidgetManager.Shared\\main.tsp"; + const convertPath = + "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp"; + const result = normalizePath(path); + expect(result).toEqual(convertPath); + }); + + test("normalizePath in Linux", () => { + vi.spyOn(process, "platform", "get").mockReturnValue("linux"); + const path = "specification/contosowidgetmanager/Contoso.WidgetManager.Shared/main.tsp"; + const result = normalizePath(path); + expect(result).toEqual(path); + }); + }); }); diff --git a/eng/tools/spec-gen-sdk-runner/tsconfig.json b/eng/tools/spec-gen-sdk-runner/tsconfig.json index fde5092731fb..ec85640f015c 100644 --- a/eng/tools/spec-gen-sdk-runner/tsconfig.json +++ b/eng/tools/spec-gen-sdk-runner/tsconfig.json @@ -2,8 +2,10 @@ "extends": "../tsconfig.json", "compilerOptions": { "outDir": "./dist", + "rootDir": ".", "module": "ESNext", "target": "ESNext", "moduleResolution": "node", - } + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], } diff --git a/eng/tools/specs-model/.prettierignore b/eng/tools/specs-model/.prettierignore deleted file mode 100644 index fb1653e08b6d..000000000000 --- a/eng/tools/specs-model/.prettierignore +++ /dev/null @@ -1,4 +0,0 @@ -*.json -*.md -*.jsonc -dist diff --git a/eng/tools/specs-model/.prettierrc.mjs b/eng/tools/specs-model/.prettierrc.mjs deleted file mode 100644 index 7bc9d0d1cd4d..000000000000 --- a/eng/tools/specs-model/.prettierrc.mjs +++ /dev/null @@ -1,18 +0,0 @@ -// This config is adapted from https://github.com/microsoft/typespec/blob/main/.prettierrc.json -// See eng/README.md for context. 
-/** - * @see https://prettier.io/docs/en/configuration.html - * @type {import("prettier").Config} - */ -const config = { - arrowParens: "always", - trailingComma: "es5", - bracketSpacing: true, - endOfLine: "lf", - printWidth: 100, - semi: true, - singleQuote: false, - tabWidth: 2, -}; - -export default config; diff --git a/eng/tools/specs-model/cmd/get-specs-model.js b/eng/tools/specs-model/cmd/get-specs-model.js deleted file mode 100755 index cf0bb73957bc..000000000000 --- a/eng/tools/specs-model/cmd/get-specs-model.js +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env node - -import { main } from "../dist/index.js"; - -await main(); diff --git a/eng/tools/specs-model/eslint.config.js b/eng/tools/specs-model/eslint.config.js deleted file mode 100644 index 03ff25bbdad9..000000000000 --- a/eng/tools/specs-model/eslint.config.js +++ /dev/null @@ -1,98 +0,0 @@ -// @ts-check - -// The overall contents of this file is based on: -// https://typescript-eslint.io/getting-started#step-2-configuration -// https://typescript-eslint.io/getting-started/typed-linting/#shared-configurations -// Read inline comments for details on other sources. 
- -import eslint from "@eslint/js"; -import eslintPluginUnicorn from "eslint-plugin-unicorn"; -import tseslint from "typescript-eslint"; - -const config = tseslint.config( - // ======================================== - // ESLint + TS-ESLint configs - // ======================================== - { - // Needed for 'npm run lint' per: - // https://eslint.org/docs/latest/use/configure/migration-guide#--ext - // See also: - // - https://typescript-eslint.io/troubleshooting/typed-linting/#i-get-errors-telling-me-eslint-was-configured-to-run--however-that-tsconfig-does-not--none-of-those-tsconfigs-include-this-file - // - https://eslint.org/docs/latest/use/configure/ignore - ignores: [".prettierrc.cjs", "**/*.d.ts", "**/*.js", "**/*.mjs"], - }, - eslint.configs.recommended, - ...tseslint.configs.strictTypeChecked, - ...tseslint.configs.stylisticTypeChecked, - { - languageOptions: { - parserOptions: { - project: true, - tsconfigRootDir: import.meta.dirname, - }, - }, - }, - { - // Disable type-aware linting on .js files - // Otherwise eslint would complain about types in .js files, including this config file. - // Config snippet taken from https://typescript-eslint.io/packages/typescript-eslint/#advanced-usage - // Note: this is likely redundant with the global ignores of .js files, but keeping here for reference. - files: ["**/*.js"], - ...tseslint.configs.disableTypeChecked, - }, - - // ======================================== - // Secondary configs - // ======================================== - // @ts-expect-error The unicorn configs are not typed correctly, but they do work. - // Snippet taken from https://github.com/sindresorhus/eslint-plugin-unicorn#preset-configs-eslintconfigjs - eslintPluginUnicorn.configs["flat/recommended"], - // Note: in spite of my best efforts, I did not manage to get lodash eslint plugin to work in a clean way. 
- // https://github.com/wix-incubator/eslint-plugin-lodash - // I did manage to get it to lint, but the ESLint server output was throwing error about - // not being able to locate the config file. - // I suspect this is because the plugin was not migrated to the new flat config format since ESLint v9. - // Maybe this can be worked around with - // https://eslint.org/blog/2024/05/eslint-compatibility-utilities/ - - // ======================================== - // Rulesets overrides - // ======================================== - { - rules: { - // Sometimes we have to help the type checker with "!": - // e.g. when doing `if (arr.length > 0) { const ... = arr[0]! }` - // Note: this originates from [strict] - // https://typescript-eslint.io/rules/no-non-null-assertion - "@typescript-eslint/no-non-null-assertion": "off", - - // We want more flexibility with file names. - // https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/filename-case.md - "unicorn/filename-case": "off", - - // We prefer to have explicitly import at the top of the file, even if the same element is exported again, - // which we do in index.ts files. - // https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/prefer-export-from.md - "unicorn/prefer-export-from": ["error", { ignoreUsedVariables: true }], - - // We allow some abbreviations that we like. - // https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/prevent-abbreviations.md - "unicorn/prevent-abbreviations": [ - "error", - { - allowList: { - args: true, - }, - }, - ], - }, - } -); - -export default config; - -// Debug tool: -// Uncomment to print the config. View it in VS Code / Output / ESLint. Run "ESLint: Restart ESLint Server" command to force output. 
-// console.log(`ESLint config: ${JSON.stringify(config)}`) - -// [strict]: https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/eslint-plugin/src/configs/strict.ts diff --git a/eng/tools/specs-model/package.json b/eng/tools/specs-model/package.json deleted file mode 100644 index 04ea395e6628..000000000000 --- a/eng/tools/specs-model/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "@azure-tools/specs-model", - "private": true, - "type": "module", - "main": "dist/src/index.js", - "bin": { - "get-specs-model": "cmd/get-specs-model.js" - }, - "scripts": { - "build": "tsc --build", - "test": "vitest", - "test:ci": "vitest run --coverage --reporter=verbose", - "lint": "eslint . -c eslint.config.js --report-unused-disable-directives --max-warnings 0", - "lint:fix": "eslint . -c eslint.config.js --fix", - "prettier": "prettier . --check", - "prettier:debug": "prettier . --check ---log-level debug", - "prettier:write": "prettier . --write" - }, - "engines": { - "node": ">=20.0.0" - }, - "dependencies": {}, - "devDependencies": { - "@eslint/js": "^9.21.0", - "@tsconfig/strictest": "^2.0.5", - "@types/node": "^20.0.0", - "@vitest/coverage-v8": "^3.0.7", - "eslint": "^9.21.0", - "eslint-plugin-unicorn": "^58.0.0", - "prettier": "~3.5.3", - "typescript": "~5.8.2", - "typescript-eslint": "^8.26.0", - "vitest": "^3.0.7" - } -} diff --git a/eng/tools/specs-model/src/getSpecsModel.ts b/eng/tools/specs-model/src/getSpecsModel.ts deleted file mode 100644 index 3377729002b2..000000000000 --- a/eng/tools/specs-model/src/getSpecsModel.ts +++ /dev/null @@ -1,7 +0,0 @@ -export async function getSpecsModel(path: string): Promise { - // eslint-disable-next-line @typescript-eslint/require-await - await (async () => { - console.log(path); - })(); - return `stub getSpecsModel. 
path: ${path}`; -} diff --git a/eng/tools/specs-model/src/index.ts b/eng/tools/specs-model/src/index.ts deleted file mode 100644 index e188045340d5..000000000000 --- a/eng/tools/specs-model/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { exit } from "node:process"; -import { getSpecsModel } from "./getSpecsModel.js"; - -function getUsage(): string { - return ( - " Usage: npx get-specs-model \n" + - "Returns: JSON metadata for the input file or directory.\n" + - "\n" + - "The input path:\n" + - "- Must be an absolute or relative path to local clone of the https://github.com/Azure/azure-rest-api-specs or https://github.com/Azure/azure-rest-api-specs-pr repo.\n" + - "- Must point to the /specification directory or one of its descendants.\n" + - "\n" + - "Example: npx get-specs-model $HOME/repos/azure-rest-api-specs/specification\n" + - "Returns: JSON with metadata for the entire 'specification' directory of the local clone of 'azure-rest-api-specs' repo.\n" - ); -} - -export async function main() { - const args: string[] = process.argv.slice(2); - - if (args.length > 0) { - const path: string = args[0]!; - const specsModel: string = await getSpecsModel(path); - console.log(JSON.stringify(specsModel)); - exit(0); - } else { - console.error(getUsage()); - exit(1); - } -} - -export { getSpecsModel }; diff --git a/eng/tools/specs-model/test/getSpecsModel.test.ts b/eng/tools/specs-model/test/getSpecsModel.test.ts deleted file mode 100644 index a4d8cc32c531..000000000000 --- a/eng/tools/specs-model/test/getSpecsModel.test.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { expect, test } from "vitest"; -import { getSpecsModel } from "../src/getSpecsModel.js"; - -test("example getSpecsModel test", async () => { - const output = await getSpecsModel("foo_path"); - expect(output).toEqual("stub getSpecsModel. 
path: foo_path"); -}); diff --git a/eng/tools/specs-model/tsconfig.json b/eng/tools/specs-model/tsconfig.json deleted file mode 100644 index 4a6486ecc5e4..000000000000 --- a/eng/tools/specs-model/tsconfig.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "extends": [ - "../tsconfig.json", - // [strictest] - "@tsconfig/strictest/tsconfig.json" - ], - "compilerOptions": { - "outDir": "./dist", - "target": "ES2022", - "lib": ["ES2022"], - - // checkJS is set to true by [strictest] but we need to disable it to avoid this [build failure]. - // We don't need it anyway, as all sources are in .ts except the 3-line cmd entry-point. - // https://www.typescriptlang.org/tsconfig/#checkJs - "checkJs": false - } -} - -// [strictest]: https://www.npmjs.com/package/@tsconfig/strictest from [tsconfig bases] -// [tsconfig bases]: https://github.com/tsconfig/bases#centralized-recommendations-for-tsconfig-bases from https://www.typescriptlang.org/tsconfig#target -// [build failure]: https://stackoverflow.com/questions/42609768/typescript-error-cannot-write-file-because-it-would-overwrite-input-file diff --git a/eng/tools/suppressions/package.json b/eng/tools/suppressions/package.json index 52250330d8d4..c3f8f2d7c854 100644 --- a/eng/tools/suppressions/package.json +++ b/eng/tools/suppressions/package.json @@ -8,6 +8,9 @@ }, "scripts": { "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", "test": "vitest", "test:ci": "vitest run --coverage --reporter=verbose" }, @@ -23,6 +26,7 @@ "devDependencies": { "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" } diff --git a/eng/tools/suppressions/src/suppressions.ts b/eng/tools/suppressions/src/suppressions.ts index 65389eea203f..018933651fb2 100644 --- a/eng/tools/suppressions/src/suppressions.ts +++ b/eng/tools/suppressions/src/suppressions.ts @@ -1,14 +1,18 @@ import { Stats } from "fs"; import { access, constants, lstat, readFile } from "fs/promises"; import { minimatch } from "minimatch"; +import { createRequire } from "module"; import { dirname, join, resolve, sep } from "path"; import { sep as posixSep } from "path/posix"; +import vm from "vm"; import { parse as yamlParse } from "yaml"; import { z } from "zod"; import { fromError } from "zod-validation-error"; export interface Suppression { tool: string; + // String of JavaScript CJS code, executed in a prepared context, that determines if a suppression should be included + if?: string; // Output only exposes "paths". For input, if "path" is defined, it is inserted at the start of "paths". paths: string[]; rules?: string[]; @@ -20,6 +24,7 @@ const suppressionSchema = z.array( z .object({ tool: z.string(), + if: z.string().optional(), // For now, input allows "path" alongside "paths". Lather, may deprecate "path". 
path: z.string().optional(), paths: z.array(z.string()).optional(), @@ -39,6 +44,7 @@ const suppressionSchema = z.array( } return { tool: s.tool, + if: s.if, paths: paths, rules: s.rules, subRules: s["sub-rules"], @@ -70,7 +76,11 @@ const suppressionSchema = z.array( * ); * ``` */ -export async function getSuppressions(tool: string, path: string): Promise { +export async function getSuppressions( + tool: string, + path: string, + context: Record = {}, +): Promise { path = resolve(path); // If path doesn't exist, throw instead of returning "[]" to prevent confusion @@ -86,6 +96,7 @@ export async function getSuppressions(tool: string, path: string): Promise = {}, ): Suppression[] { path = resolve(path); suppressionsFile = resolve(suppressionsFile); @@ -140,19 +152,28 @@ export function getSuppressionsFromYaml( throw fromError(err); } - return suppressions - .filter((s) => s.tool === tool) - .filter((s) => { - // Minimatch only allows forward-slashes in patterns and input - const pathPosix: string = path.split(sep).join(posixSep); - - return s.paths.some((suppressionPath) => { - const pattern: string = join(dirname(suppressionsFile), suppressionPath) - .split(sep) - .join(posixSep); - return minimatch(pathPosix, pattern); - }); - }); + // Make "require" available inside sandbox for CJS imports + const sandbox = { ...context, require: createRequire(import.meta.url) }; + + return ( + suppressions + // Tool name + .filter((s) => s.tool === tool) + // Path + .filter((s) => { + // Minimatch only allows forward-slashes in patterns and input + const pathPosix: string = path.split(sep).join(posixSep); + + return s.paths.some((suppressionPath) => { + const pattern: string = join(dirname(suppressionsFile), suppressionPath) + .split(sep) + .join(posixSep); + return minimatch(pathPosix, pattern); + }); + }) + // If + .filter((s) => s.if === undefined || vm.runInNewContext(s.if, sandbox)) + ); } /** diff --git a/eng/tools/suppressions/test/e2e/merge/foo/bar/suppressions.yaml 
b/eng/tools/suppressions/test/e2e/merge/foo/bar/suppressions.yaml index 113dc0b7729e..e385b93acaa2 100644 --- a/eng/tools/suppressions/test/e2e/merge/foo/bar/suppressions.yaml +++ b/eng/tools/suppressions/test/e2e/merge/foo/bar/suppressions.yaml @@ -1,9 +1,9 @@ - tool: TestTool - path: '**' + path: "**" reason: bar-globstar - tool: TestTool - path: '*' + path: "*" reason: bar-star - tool: TestTool diff --git a/eng/tools/suppressions/test/e2e/merge/foo/suppressions.yaml b/eng/tools/suppressions/test/e2e/merge/foo/suppressions.yaml index cdc396f96095..7e45c2b73aff 100644 --- a/eng/tools/suppressions/test/e2e/merge/foo/suppressions.yaml +++ b/eng/tools/suppressions/test/e2e/merge/foo/suppressions.yaml @@ -1,9 +1,9 @@ - tool: TestTool - path: '**' + path: "**" reason: foo-globstar - tool: TestTool - path: '*' + path: "*" reason: foo-star - tool: TestTool diff --git a/eng/tools/suppressions/test/suppressions.test.ts b/eng/tools/suppressions/test/suppressions.test.ts index ff5bad696846..877e29bb1e67 100644 --- a/eng/tools/suppressions/test/suppressions.test.ts +++ b/eng/tools/suppressions/test/suppressions.test.ts @@ -268,6 +268,7 @@ test("suppression with rules", () => { expect(suppressions).toStrictEqual([ { tool: "TestTool", + if: undefined, paths: ["foo"], rules: ["my-rule"], subRules: ["my.option.a", "my.option.b"], @@ -275,3 +276,69 @@ test("suppression with rules", () => { }, ]); }); + +test.each([ + { context: { foo: false, bar: false }, expected: ["no-if", "process-version"] }, + { + context: { foo: true, bar: false }, + expected: ["no-if", "if-foo", "if-foo-or-bar", "process-version"], + }, + { + context: { foo: false, bar: true }, + expected: ["no-if", "if-bar", "if-foo-or-bar", "process-version"], + }, + { + context: { foo: true, bar: true }, + expected: ["no-if", "if-foo", "if-bar", "if-foo-or-bar", "if-foo-and-bar", "process-version"], + }, +])("if($context)", ({ context, expected }) => { + const suppressionYaml = ` +- tool: TestTool + path: "**" + reason: 
no-if +- tool: TestTool + path: "**" + if: foo + reason: if-foo +- tool: TestTool + path: "**" + if: bar + reason: if-bar +- tool: TestTool + path: "**" + if: foo || bar + reason: if-foo-or-bar +- tool: TestTool + path: "**" + if: foo && bar + reason: if-foo-and-bar +- tool: TestTool + path: "**" + if: require("process").version.startsWith("v") + reason: process-version +`; + + let suppressions: Suppression[] = getSuppressionsFromYaml( + "TestTool", + "test-path", + "suppressions.yaml", + suppressionYaml, + context, + ); + + expect(suppressions.map((s) => s.reason).sort()).toEqual(expected.sort()); +}); + +test.each([ + ["invalid javascript", "Unexpected identifier 'javascript'"], + ["1(1)", "1 is not a function"], +])("if: %s", (ifExpression, expectedException) => { + expect(() => + getSuppressionsFromYaml( + "TestTool", + "test-path", + "suppressions.yaml", + `- tool: TestTool\n if: "${ifExpression}"\n path: "**"\n reason: test`, + ), + ).throws(expectedException); +}); diff --git a/eng/tools/suppressions/tsconfig.json b/eng/tools/suppressions/tsconfig.json index ec6d6640928a..1c9d0b24bed9 100644 --- a/eng/tools/suppressions/tsconfig.json +++ b/eng/tools/suppressions/tsconfig.json @@ -2,5 +2,7 @@ "extends": "../tsconfig.json", "compilerOptions": { "outDir": "./dist", - } + "rootDir": ".", + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], } diff --git a/eng/tools/suppressions/vitest.config.ts b/eng/tools/suppressions/vitest.config.ts new file mode 100644 index 000000000000..bc6ad4809131 --- /dev/null +++ b/eng/tools/suppressions/vitest.config.ts @@ -0,0 +1,9 @@ +import { configDefaults, defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + coverage: { + exclude: [...configDefaults.coverage.exclude!, "cmd/**", "src/index.ts"], + }, + }, +}); diff --git a/eng/tools/tsconfig.json b/eng/tools/tsconfig.json index b7de91a51f12..ac5fe54c268f 100644 --- a/eng/tools/tsconfig.json +++ b/eng/tools/tsconfig.json @@ -1,23 +1,45 @@ { 
"extends": "../../tsconfig.json", "compilerOptions": { - "target": "ES6", - "module": "Node16", - + "target": "es2024", + "module": "NodeNext", // override "importHelpers:true" in root tsconfig.json "importHelpers": false, - // required to use project references "composite": true, }, + // Compile nothing at this level + "files": [], "references": [ - { "path": "./specs-model" }, - { "path": "./suppressions" }, - { "path": "./tsp-client-tests" }, - { "path": "./typespec-requirement" }, - { "path": "./typespec-validation" }, - { "path": "./sdk-suppressions" }, - { "path": "./spec-gen-sdk-runner"}, - { "path": "./lint-diff" } - ] + { + "path": "./lint-diff", + }, + { + "path": "./oav-runner", + }, + { + "path": "./sdk-suppressions", + }, + { + "path": "./openapi-diff-runner", + }, + { + "path": "./spec-gen-sdk-runner", + }, + { + "path": "./suppressions", + }, + { + "path": "./tsp-client-tests", + }, + { + "path": "./typespec-migration-validation", + }, + { + "path": "./typespec-requirement", + }, + { + "path": "./typespec-validation", + }, + ], } diff --git a/eng/tools/tsp-client-tests/package.json b/eng/tools/tsp-client-tests/package.json index 03df1e1ba13b..168b49e23b67 100644 --- a/eng/tools/tsp-client-tests/package.json +++ b/eng/tools/tsp-client-tests/package.json @@ -3,13 +3,17 @@ "private": true, "type": "module", "devDependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", "@types/node": "^20.0.0", - "execa": "^9.3.0", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, "scripts": { "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", "test": "vitest", "test:ci": "vitest run --reporter=verbose" }, diff --git a/eng/tools/tsp-client-tests/test/tsp-client.test.ts b/eng/tools/tsp-client-tests/test/tsp-client.test.ts index 9b2e8fc455d6..71565a1eb619 100644 --- a/eng/tools/tsp-client-tests/test/tsp-client.test.ts +++ b/eng/tools/tsp-client-tests/test/tsp-client.test.ts @@ -1,18 +1,13 @@ -import { execa } from "execa"; +import { execNpmExec } from "@azure-tools/specs-shared/exec"; +import { debugLogger } from "@azure-tools/specs-shared/logger"; + import { access, constants, mkdir, rm } from "fs/promises"; import { dirname, join } from "path"; import { ExpectStatic, test } from "vitest"; const repoRoot = join(__dirname, "..", "..", "..", ".."); -async function npmExec(...args: string[]) { - const allArgs = ["exec", "--no", "--"].concat(args); - console.log(`${repoRoot}$ npm ${allArgs.join(" ")}`); - - const result = await execa("npm", allArgs, { all: true, cwd: repoRoot, reject: false }); - console.log(result.all); - return result; -} +const options = { cwd: repoRoot, logger: debugLogger }; async function convert(expect: ExpectStatic, readme: string) { const resMan = readme.includes("resource-manager"); @@ -29,19 +24,21 @@ async function convert(expect: ExpectStatic, readme: string) { } try { - let { stdout, all, exitCode } = await npmExec( - "tsp-client", - "convert", - "--no-prompt", - "--swagger-readme", - readme, - "-o", - outputFolder, - resMan ? "--arm" : "", + let result = await execNpmExec( + [ + "tsp-client", + "convert", + "--no-prompt", + "--swagger-readme", + readme, + "-o", + outputFolder, + resMan ? 
"--arm" : "", + ], + options, ); - expect(stdout).toContain("Converting"); - expect(exitCode, all).toBe(0); + expect(result.stdout).toContain("Converting"); const tspConfigYaml = join(outputFolder, "tspconfig.yaml"); await access(tspConfigYaml, constants.R_OK); @@ -52,10 +49,9 @@ async function convert(expect: ExpectStatic, readme: string) { console.log(`File exists: ${mainTsp}`); // Use "--no-emit" to avoid generating output files that would need to be cleaned up - ({ stdout, all, exitCode } = await npmExec("tsp", "compile", "--no-emit", outputFolder)); + result = await execNpmExec(["tsp", "compile", "--no-emit", outputFolder], options); - expect(stdout).toContain("TypeSpec compiler"); - expect(exitCode, all).toBe(0); + expect(result.stdout).toContain("TypeSpec compiler"); } finally { await rm(outputFolder, { recursive: true, force: true }); } @@ -65,10 +61,7 @@ async function convert(expect: ExpectStatic, readme: string) { } test.concurrent("Usage", async ({ expect }) => { - const { all, exitCode } = await npmExec("tsp-client"); - - expect(all).toContain("Usage"); - expect(exitCode).not.toBe(0); + await expect(execNpmExec(["tsp-client"], options)).rejects.toThrow("Usage"); }); // Disabled since tsp-client is failing on data-plane diff --git a/eng/tools/tsp-client-tests/tsconfig.json b/eng/tools/tsp-client-tests/tsconfig.json index ec6d6640928a..8bf1d5b99616 100644 --- a/eng/tools/tsp-client-tests/tsconfig.json +++ b/eng/tools/tsp-client-tests/tsconfig.json @@ -1,6 +1,9 @@ { "extends": "../tsconfig.json", "compilerOptions": { + "allowJs": true, "outDir": "./dist", - } + "rootDir": ".", + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], } diff --git a/eng/tools/typespec-migration-validation/cmd/tsmv.js b/eng/tools/typespec-migration-validation/cmd/tsmv.js new file mode 100755 index 000000000000..e2a37e0b5491 --- /dev/null +++ b/eng/tools/typespec-migration-validation/cmd/tsmv.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +import { main } from 
"../dist/src/index.js"; + +await main(); diff --git a/eng/tools/typespec-migration-validation/package.json b/eng/tools/typespec-migration-validation/package.json new file mode 100644 index 000000000000..b41785aaa4a7 --- /dev/null +++ b/eng/tools/typespec-migration-validation/package.json @@ -0,0 +1,36 @@ +{ + "name": "@azure-tools/typespec-migration-validation", + "private": true, + "type": "module", + "main": "dist/src/index.js", + "bin": { + "tsmv": "cmd/tsmv.js" + }, + "dependencies": { + "@azure-tools/typespec-autorest": ">=0.44.0 <1.0.0", + "json-diff": "^1.0.6", + "yargs": "^18.0.0" + }, + "devDependencies": { + "@types/json-diff": "^1.0.3", + "@types/node": "^18.19.86", + "@types/yargs": "^17.0.33", + "@typescript-eslint/eslint-plugin": "^8.32.1", + "@typescript-eslint/parser": "^8.32.1", + "eslint": "^9.26.0", + "prettier": "~3.5.3", + "typescript": "^5.8.3" + }, + "scripts": { + "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . --ignore-path ../.prettierignore --check --log-level debug", + "test": "vitest", + "test:ci": "vitest run --coverage --reporter=verbose", + "watch": "tsc --build --watch" + }, + "engines": { + "node": ">=20.0.0" + } +} diff --git a/eng/tools/typespec-migration-validation/prompts/globalSuppressionToLocal.md b/eng/tools/typespec-migration-validation/prompts/globalSuppressionToLocal.md new file mode 100644 index 000000000000..ffc54909dcc7 --- /dev/null +++ b/eng/tools/typespec-migration-validation/prompts/globalSuppressionToLocal.md @@ -0,0 +1,62 @@ +# Remove global suppressions + +As a GitHub Copilot assistant, follow this guide to remove all the global suppressions and specify them locally. 
+ +## Understanding User Input + +When users request help with converting global suppressions to inline suppressions in TypeSpec and Swagger specifications, and adding appropriate justifications for all suppressions, they typically provide a {TypeSpec-folder} in the format `{ServiceName}.Management` (where ServiceName examples include: `Compute`, `Network`, `Storage`) or a `tspconfig.yaml` file. + +When the user provides a service folder name, look for the `tspconfig.yaml` under that folder and base subsequent work on the content of that file. + +If the user provides a `tspconfig.yaml` file, use the information in that file directly to guide the migration. In this case, {TypeSpec-folder} is the folder containing that file. + +If the user does not provide this information, be sure to ask. Use this information to customize your guidance, especially in the following cases: + +- Selecting appropriate conversion commands +- Providing service-type-related examples + +## Migration Steps + +## Step 1: Locate the tspconfig.yaml file + +1. **Check the `tspconfig.yaml` file** + + - Location: `{ServiceName}.Management/tspconfig.yaml` or the `tspconfig.yaml` file directly provided by the user + +2. **Check the disable section in the tspconfig.yaml file** + - Look for all configurations under `disable:`, for example: + ```yaml + linter: + extends: + - "@azure-tools/typespec-azure-rulesets/resource-manager" + disable: + "@azure-tools/typespec-azure-core/no-nullable": "backward-compatibility" + ``` + - These configurations represent global suppressions that you need to convert to inline suppressions. + +## Step 2: Migrate global suppressions to inline suppressions + +1. Remove the `disable:` section from the `tspconfig.yaml` file, and remember all the warning types that were suppressed by the removed global suppressions. + +2. Recompile the tsp files to generate new Swagger files while reproducing the warnings that were previously globally suppressed. 
+ +```powershell +cd {TypeSpec-folder} +npx tsp compile . +``` + +3. For warnings that appear during the compilation process, you need to categorize the warnings according to the warning types from the previously removed global suppressions and add inline suppressions in the corresponding TypeSpec files. The format for inline suppressions is as follows: + +```typespec +#suppress "@azure-tools/typespec-azure-core/no-openapi" "For backward compatibility with existing API" +@operationId("WebPubSubCustomDomains_Get") +get is ArmResourceRead; +``` + +Note that inline suppressions need to be placed above the relevant code and require clear justification. The justification in the example, "For backward compatibility with existing API", is a good example of clear reasoning. + +## Step 3: Validation and Testing + +1. **Validate inline suppressions**: + - Ensure all inline suppressions have been correctly added with clear and understandable justifications. + - Recompile the TypeSpec files to ensure there are no new warnings or errors. diff --git a/eng/tools/typespec-migration-validation/scripts/download-main.ps1 b/eng/tools/typespec-migration-validation/scripts/download-main.ps1 new file mode 100644 index 000000000000..be60fe389cb4 --- /dev/null +++ b/eng/tools/typespec-migration-validation/scripts/download-main.ps1 @@ -0,0 +1,105 @@ +param( + [string]$swaggerPath, + [string]$callValidation = $false +) + +. 
$PSScriptRoot/../../../scripts/ChangedFiles-Functions.ps1 +function Download-Swagger-InMain($swaggerFolder, $latestCommitId) { + # sparse checkout its resource-manager swagger folder, later we also add the data-plane folder + $repoUrl = "https://github.com/Azure/azure-rest-api-specs" + $repoRoot = git rev-parse --show-toplevel + $cloneDir = Join-Path $repoRoot "sparse-spec" + if (!(Test-Path $cloneDir)) { + New-Item -Path $cloneDir -ItemType Directory | Out-Null + } + + Push-Location $cloneDir + try { + if (!(Test-Path ".git")) { + Write-Host "Initializing sparse clone for repo: $repoUrl" + git clone --no-checkout --filter=tree:0 $repoUrl . + if ($LASTEXITCODE) { exit $LASTEXITCODE } + git sparse-checkout init + if ($LASTEXITCODE) { exit $LASTEXITCODE } + Remove-Item .git/info/sparse-checkout -Force + } + + Write-Host "Updating sparse checkout file with directory: $swaggerFolder" + Add-Content .git/info/sparse-checkout $swaggerFolder + git sparse-checkout reapply + if ($LASTEXITCODE) { exit $LASTEXITCODE } + + Write-Host "Checking out commit: $latestCommitId" + git checkout $latestCommitId + if ($LASTEXITCODE) { exit $LASTEXITCODE } + + return Join-Path $cloneDir $swaggerFolder + } + finally { + Pop-Location + } +} + +if ($swaggerPath -eq "") { + # Get all changed swagger files + $changedSwaggers = Get-ChangedSwaggerFiles + if ($changedSwaggers.Count -eq 0) { + Write-Host "No swagger files changed." + exit 0 + } + + Write-Host "Processing changed swagger files:" + $changedSwaggers | ForEach-Object { + Write-Host "Processing $_" + try { + $content = Get-Content $_ -Raw + $jsonContent = $content | ConvertFrom-Json + + # Check if the swagger is TypeSpec generated + if ($null -ne $jsonContent.info -and $jsonContent.info.'x-typespec-generated' -ne $null){ + $swaggerPath = $_ + Write-Host "Found TypeSpec generated swagger file: $swaggerPath" + } + } + catch { + Write-Warning "Failed to parse JSON from $_. 
Error: $($_.Exception.Message)" + continue + } + } +} + +if ($swaggerPath -eq "") { + Write-Host "No TypeSpec generated swagger file found." + exit 1 +} + +# Get latest commit id from main branch +$latestCommitId = git ls-remote "https://github.com/Azure/azure-rest-api-specs.git" main | Select-String -Pattern "refs/heads/main" | ForEach-Object { $_.ToString().Split("`t")[0] } +Write-Host "Latest commit id from main branch: $latestCommitId" + +$swaggerFolder = "" +$swaggerPath = $swaggerPath.Replace("\", "/") +if ($swaggerPath -match "specification/([a-z]*)/resource-manager/(.*)/(stable|preview)/([a-z0-9-]+)/(.*).json") { + $swaggerFolder = "specification/$($matches[1])/resource-manager/$($matches[2])/$($matches[3])/$($matches[4])/" + Write-Host "Swagger folder: $swaggerFolder" +} +else { + Write-Host "Please provide the path of the swagger that generated from your TypeSpec." + exit 1 +} + +$swaggerInMain = Download-Swagger-InMain $swaggerFolder $latestCommitId + +$repoRoot = git rev-parse --show-toplevel +if ($swaggerPath.StartsWith("specification")) { + $swaggerPath = Join-Path $repoRoot $swaggerPath +} + +if ($callValidation -eq $true) { + Write-Host "Executing TypeSpec migration validation..." 
+ npx tsmv $swaggerInMain $swaggerPath +} +else { + Write-Host "Your next command: npx tsmv $swaggerInMain $swaggerPath {outputFolder}" +} + diff --git a/eng/tools/typespec-migration-validation/src/configuration.ts b/eng/tools/typespec-migration-validation/src/configuration.ts new file mode 100644 index 000000000000..50e897923a51 --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/configuration.ts @@ -0,0 +1,11 @@ +interface configuration { + ignoreDescription: boolean; + enumNameToCamelCase: boolean; + ignorePathCase: boolean; +} + +export const configuration: configuration = { + ignoreDescription: true, + enumNameToCamelCase: true, + ignorePathCase: false, // Normalize the segments before provider +}; diff --git a/eng/tools/typespec-migration-validation/src/document.ts b/eng/tools/typespec-migration-validation/src/document.ts new file mode 100644 index 000000000000..88dd66b6a9aa --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/document.ts @@ -0,0 +1,435 @@ +import { + OpenAPI2Document, + OpenAPI2PathItem, + HttpMethod, + OpenAPI2Operation, + OpenAPI2Schema, + OpenAPI2Parameter, + Ref, + Refable, + OpenAPI2Response, + OpenAPI2SchemaProperty, + OpenAPI2SchemaRefProperty, +} from "@azure-tools/typespec-autorest"; +import { + isApiVersionParameter, + isResourceGroupNameParameter, + isSubscriptionIdParameter, +} from "./parameter.js"; +import { configuration } from "./configuration.js"; + +let originalDocument: OpenAPI2Document | undefined = undefined; + +export function processDocument(document: OpenAPI2Document): OpenAPI2Document { + originalDocument = deepCopy(document); + + const newDocument: OpenAPI2Document = deepCopy(document); + if (document.schemes) { + delete newDocument.schemes; + } + if (document.host) { + delete newDocument.host; + } + if (document.security) { + delete newDocument.security; + } + if (document.securityDefinitions) { + delete newDocument.securityDefinitions; + } + if (document.tags) { + delete newDocument.tags; + 
} + if (document.info && document.info["x-typespec-generated"]) { + delete newDocument.info["x-typespec-generated"]; + } + + for (const route in document.paths) { + const path = document.paths[route] as OpenAPI2PathItem; + const processedPath = processPath(path); + if (configuration.ignorePathCase) { + const normalizedRoute = route + .replace(/\/resourcegroups\//i, "/resourceGroups/") + .replace(/\/subscriptions\//i, "/subscriptions/"); + delete newDocument.paths[route]; + newDocument.paths[normalizedRoute] = processedPath; + } else { + newDocument.paths[route] = processedPath; + } + } + + for (const definitionName in document.definitions) { + const definition = document.definitions[definitionName] as OpenAPI2Schema; + if (definition.enum) { + delete newDocument.definitions![definitionName]; + continue; + } + const processedDefinition = processDefinition(definition); + newDocument.definitions ??= {}; + newDocument.definitions[definitionName] = processedDefinition; + } + + newDocument.parameters = {}; + return newDocument; +} + +function processPath(path: OpenAPI2PathItem): OpenAPI2PathItem { + function isHttpMethod(key: string): key is HttpMethod { + const httpMethods: HttpMethod[] = [ + "get", + "put", + "post", + "delete", + "options", + "head", + "patch", + "trace", + ]; + return httpMethods.includes(key as HttpMethod); + } + + const newPath: OpenAPI2PathItem = deepCopy(path); + for (const verb in path) { + if (isHttpMethod(verb)) { + const operation = path[verb] as OpenAPI2Operation; + const processedOperation = processOperation(operation); + newPath[verb] = processedOperation; + } + } + + return newPath; +} + +function processOperation(operation: OpenAPI2Operation): OpenAPI2Operation { + const newOperation = deepCopy(operation); + let index = newOperation.parameters.findIndex((p) => isApiVersionParameter(p)); + if (index > -1) { + newOperation.parameters.splice(index, 1); + } + index = newOperation.parameters.findIndex((p) => isSubscriptionIdParameter(p)); + 
if (index > -1) { + newOperation.parameters.splice(index, 1); + } + index = newOperation.parameters.findIndex((p) => isResourceGroupNameParameter(p)); + if (index > -1) { + newOperation.parameters.splice(index, 1); + } + newOperation.parameters = newOperation.parameters.map((p) => processParameter(p)); + + for (const response in operation.responses) { + const responseObject = operation.responses[response] as OpenAPI2Response; + const processedResponse = processResponse(responseObject); + newOperation.responses ??= {}; + newOperation.responses[response] = processedResponse; + } + + if (newOperation["x-ms-long-running-operation"] === false) { + delete newOperation["x-ms-long-running-operation"]; + } + if ( + newOperation["x-ms-long-running-operation-options"] && + newOperation["x-ms-long-running-operation-options"]["final-state-via"] === "location" + ) { + delete newOperation["x-ms-long-running-operation-options"]; + } + + if (newOperation["x-ms-pageable"] && newOperation["x-ms-pageable"]["nextLinkName"] === null) { + newOperation["x-ms-pageable"]["nextLinkName"] = "nextLink"; + } + if (newOperation["x-ms-pageable"] && newOperation["x-ms-pageable"]["itemName"] === "value") { + delete newOperation["x-ms-pageable"]["itemName"]; + } + + if ( + newOperation.produces && + ((newOperation.produces.length === 1 && newOperation.produces[0] === "application/json") || + newOperation.produces.length === 0) + ) { + delete newOperation.produces; + } + if ( + newOperation.consumes && + ((newOperation.consumes.length === 1 && newOperation.consumes[0] === "application/json") || + newOperation.consumes.length === 0) + ) { + delete newOperation.consumes; + } + + if (newOperation.tags) { + delete newOperation.tags; + } + + if (newOperation.deprecated === false) { + delete newOperation.deprecated; + } + + if (configuration.ignoreDescription) { + delete newOperation.description; + delete newOperation.summary; + } + return newOperation; +} + +function processResponse(response: 
OpenAPI2Response): OpenAPI2Response { + const newResponse: OpenAPI2Response = deepCopy(response); + newResponse.description = "ignore"; + if (newResponse.headers) { + for (const header in newResponse.headers) { + if (header === "Location" || header === "Retry-After" || header === "Azure-AsyncOperation") { + delete newResponse.headers[header]; + } + } + } + if (newResponse.headers && Object.keys(newResponse.headers).length === 0) { + delete newResponse.headers; + } + return newResponse; +} + +function processParameter(parameter: Refable): Refable { + const newParameter: Refable = deepCopy(parameter); + if ((parameter as Ref).$ref) { + const refPath = (parameter as Ref).$ref; + if (refPath.startsWith("#/parameters/")) { + const parameterName = refPath.substring("#/parameters/".length); + const originalParameter = originalDocument?.parameters?.[parameterName]; + if (originalParameter) { + return processParameter(originalParameter); + } + } + } else { + const inlineParameter = parameter as OpenAPI2Parameter; + if ((parameter as any).enum && (newParameter as any)["x-ms-enum"]?.["values"]) { + delete (newParameter as any)["x-ms-enum"]["values"]; + } + if (configuration.enumNameToCamelCase && (newParameter as any)["x-ms-enum"]?.["name"]) { + const enumName = (newParameter as any)["x-ms-enum"]["name"] as string; + const camelCaseName = enumName.charAt(0).toUpperCase() + enumName.slice(1); + (newParameter as any)["x-ms-enum"]["name"] = camelCaseName; + } + + for (const key in parameter) { + if (key === "x-ms-client-flatten") { + delete (newParameter as any)[key]; + } + if (key === "required" && inlineParameter[key] !== true) { + delete (newParameter as any)[key]; + } + if (key === "description") { + delete (newParameter as any)[key]; + } + if (key === "x-ms-parameter-location" && inlineParameter[key] === "method") { + delete (newParameter as any)[key]; + } + } + } + return newParameter; +} + +function processDefinition(definition: OpenAPI2Schema): OpenAPI2Schema { + const 
newDefinition: OpenAPI2Schema = deepCopy(definition); + for (const propertyName in definition.properties) { + const property = definition.properties[propertyName] as OpenAPI2SchemaProperty; + const processedProperty = processProperty(property); + newDefinition.properties ??= {}; + newDefinition.properties[propertyName] = processedProperty; + } + processEnumInplace(newDefinition); + if (newDefinition.additionalProperties === false) { + delete newDefinition.additionalProperties; + } + + if ( + (newDefinition.properties || newDefinition.additionalProperties) && + newDefinition.type === undefined + ) { + newDefinition.type = "object"; + } + + if (newDefinition.allOf) { + newDefinition.allOf = newDefinition.allOf.map((item) => { + if ((item as Ref).$ref) { + const refPath = (item as Ref).$ref; + if ( + refPath === + "../../../../../common-types/resource-management/v3/types.json#/definitions/Resource" + ) { + return { + ...item, + $ref: "../../../../../common-types/resource-management/v3/types.json#/definitions/ProxyResource", + }; + } + } + return item; + }); + } + + if (newDefinition.required) { + newDefinition.required = newDefinition.required.sort((a, b) => a.localeCompare(b)); + } + if (configuration.ignoreDescription) { + delete newDefinition.description; + if ((newDefinition.items as any)?.description) { + delete (newDefinition.items as any).description; + } + } + + if ((newDefinition as any)["x-ms-azure-resource"]) { + delete (newDefinition as any)["x-ms-azure-resource"]; + } + + return processPageModel(newDefinition); +} + +function processProperty(property: OpenAPI2SchemaProperty): OpenAPI2SchemaProperty { + function isOpenAPI2SchemaRefProperty( + prop: OpenAPI2SchemaProperty, + ): prop is OpenAPI2SchemaRefProperty { + return (prop as OpenAPI2SchemaRefProperty).$ref !== undefined; + } + + const newProperty: OpenAPI2SchemaProperty = deepCopy(property); + const isRef = isOpenAPI2SchemaRefProperty(newProperty); + if (isRef) { + const refPath = newProperty.$ref; + 
if (refPath.startsWith("#/definitions/")) { + const definitionName = refPath.substring("#/definitions/".length); + const originalDefinition = originalDocument?.definitions?.[definitionName]; + if (originalDefinition && originalDefinition.enum) { + const processedDefinition = processDefinition(originalDefinition); + for (const key in processedDefinition) { + (newProperty as any)[key] = (processedDefinition as any)[key as string]; + } + delete (newProperty as any).$ref; + } else if ( + originalDefinition?.type && + ["boolean", "integer", "number", "string"].includes(originalDefinition.type) + ) { + delete (newProperty as any).$ref; + for (const key in originalDefinition) { + (newProperty as any)[key] = (originalDefinition as any)[key]; + } + } + } + } else { + if (newProperty.type === "array" && newProperty.items) { + const refPath = (newProperty.items as Ref).$ref; + if (refPath !== undefined) { + if (refPath.startsWith("#/definitions/")) { + const definitionName = refPath.substring("#/definitions/".length); + const originalDefinition = originalDocument?.definitions?.[definitionName]; + if (originalDefinition && originalDefinition.enum) { + const processedDefinition = processDefinition(originalDefinition); + for (const key in processedDefinition) { + (newProperty.items as any)[key] = (processedDefinition as any)[key as string]; + } + delete (newProperty.items as any).$ref; + } + } + } else { + processEnumInplace(newProperty.items as OpenAPI2Schema); + } + } + + processEnumInplace(newProperty); + if ( + (newProperty.properties || newProperty.additionalProperties) && + newProperty.type === undefined + ) { + newProperty.type = "object"; + } + } + + const identifiers = (newProperty as any)["x-ms-identifiers"]; + if (identifiers && Array.isArray(identifiers) && identifiers.length === 0) { + delete (newProperty as any)["x-ms-identifiers"]; + } + if ((newProperty as OpenAPI2Schema).uniqueItems === false) { + delete (newProperty as OpenAPI2Schema).uniqueItems; + } + if 
(newProperty["x-ms-mutability"]) { + newProperty["x-ms-mutability"] = newProperty["x-ms-mutability"].sort((a, b) => + a.localeCompare(b), + ); + } + if ((newProperty as any)["uniqueItems"] === false) { + delete (newProperty as any)["uniqueItems"]; + } + + if (configuration.ignoreDescription) { + delete newProperty.description; + if ((newProperty as any).items?.description) { + delete (newProperty as any).items?.description; + } + } + return newProperty; +} + +function processEnumInplace(enumDefinition: OpenAPI2Schema) { + if (enumDefinition.enum === undefined) return; + + if (enumDefinition["x-ms-enum"]?.values) { + delete enumDefinition["x-ms-enum"].values; + } + if (configuration.enumNameToCamelCase && enumDefinition["x-ms-enum"]?.name) { + const enumName = enumDefinition["x-ms-enum"].name as string; + const camelCaseName = enumName.charAt(0).toUpperCase() + enumName.slice(1); + enumDefinition["x-ms-enum"].name = camelCaseName; + } +} + +function processPageModel(definition: OpenAPI2Schema): OpenAPI2Schema { + const newDefinition: OpenAPI2Schema = deepCopy(definition); + + const propertyCount = Object.keys(definition.properties ?? 
{}).length; + if (propertyCount !== 2) { + return newDefinition; + } + + const valueProperty = definition.properties?.["value"]; + if (!valueProperty || (valueProperty as OpenAPI2Schema).type !== "array") { + return newDefinition; + } + + const nextLinkProperty = definition.properties?.["nextLink"]; + if (!nextLinkProperty || (nextLinkProperty as OpenAPI2Schema).type !== "string") { + return newDefinition; + } + + newDefinition.description = "[Placeholder] Discription for page model"; + newDefinition.properties!["value"]!.description = "[Placeholder] Discription for value property"; + newDefinition.properties!["nextLink"]!.description = + "[Placeholder] Discription for nextLink property"; + (newDefinition.properties!["nextLink"] as any)["format"] = "uri"; + if (newDefinition.properties!["nextLink"]?.readOnly) { + delete newDefinition.properties!["nextLink"]?.readOnly; + } + + return newDefinition; +} + +function deepCopy(value: T): T { + if (value === null || value === undefined) { + return value; + } + + if (typeof value !== "object") { + return value; + } + + if (value instanceof Date) { + return new Date(value.getTime()) as unknown as T; + } + + if (Array.isArray(value)) { + return value.map((item) => deepCopy(item)) as unknown as T; + } + + const result: Record = {}; + for (const key in value) { + if (Object.prototype.hasOwnProperty.call(value, key)) { + result[key] = deepCopy((value as Record)[key]); + } + } + + return result as T; +} diff --git a/eng/tools/typespec-migration-validation/src/fix/definition.ts b/eng/tools/typespec-migration-validation/src/fix/definition.ts new file mode 100644 index 000000000000..4cb5c2cf2639 --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/fix/definition.ts @@ -0,0 +1,138 @@ +import { Suggestion } from "../jsonOutput.js"; +import { constructJsonPath } from "../summary.js"; +import { getPropertyName } from "./helper.js"; + +const knownPropertyDecoratorMapping: { [key: string]: string } = { + minimum: "minValue", + 
maximum: "maxValue", + minLength: "minLength", + maxLength: "maxLength", +}; + +const addedKey = "__added"; +const deletedKey = "__deleted"; + +export function handleAdded(diff: { + path: string; + value: string; + key: string; +}): Suggestion | undefined { + const { path, value, key } = diff; + if (key.endsWith(addedKey)) { + const originalKey = key.slice(0, addedKey.length); // Remove '__added' suffix + const property = getPropertyName(path); + + if (originalKey === "x-ms-client-name") { + if (property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find this TypeSpec statement @@clientName(${definitionName}.${propertyName}, "${value}") in file back-compatible.tsp or client.tsp. Delete this statement.`, + path: constructJsonPath(path, key), + }; + } + } + } + + return undefined; +} + +export function handleDeleted(diff: { + path: string; + value: string; + key: string; +}): Suggestion | undefined { + const { path, value, key } = diff; + if (key.endsWith(deletedKey)) { + const originalKey = key.slice(0, deletedKey.length); // Remove '__deleted' suffix + const property = getPropertyName(path); + + if (originalKey === "x-ms-client-name") { + if (property) { + const [definitionName, propertyName] = property; + const suggestion = `@@clientName(${definitionName}.${propertyName}, "${value}");`; + return { + suggestion: `Find file "back-compatible.tsp" or "client.tsp" and add the following statement exactly as it is:: + \`\`\`typespec + ${suggestion} + \`\`\``, + path: constructJsonPath(path, key), + }; + } + } else if (originalKey === "x-ms-client-flatten") { + if ((value as any) === true && property) { + const [definitionName, propertyName] = property; + const suggestion = `@@flattenProperty(${definitionName}.${propertyName});`; + return { + suggestion: `Find file "back-compatible.tsp" or "client.tsp" and add the following statement exactly as it is:: + \`\`\`typespec + ${suggestion} + \`\`\``, + path: constructJsonPath(path, key), + }; 
+ } + } else if (Object.keys(knownPropertyDecoratorMapping).includes(originalKey)) { + const decoratorName = knownPropertyDecoratorMapping[originalKey]; + if (property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find a model called "${definitionName}". Add \`@${decoratorName}(${value})\` onto its property "${propertyName}". If the property cannot access directly, add \`@@${decoratorName}(${definitionName}.${propertyName}, ${value});\` right after the model.`, + path: constructJsonPath(path, key), + }; + } + } else if (originalKey === "x-nullable") { + if ((value as any) === true && property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find a model called "${definitionName}". Change its property "${propertyName}" by adding \` | null\` to its property type.`, + path: constructJsonPath(path, key), + }; + } + } else if (originalKey === "readOnly") { + if ((value as any) === true && property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find a model called "${definitionName}". Add \`@visibility(Lifecycle.Read)\` onto its property "${propertyName}". If the property cannot access directly, add \`@@visibility(${definitionName}.${propertyName}, Lifecycle.Read);\` RIGHT AFTER the end bracket of the model.`, + path: constructJsonPath(path, key), + }; + } + } else if (originalKey === "x-ms-secret") { + if ((value as any) === true && property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find a model called "${definitionName}". Add \`@secret\` onto its property "${propertyName}". If the property cannot access directly, add \`@@secret(${definitionName}.${propertyName});\` right after the model.`, + path: constructJsonPath(path, key), + }; + } + } else if (originalKey === "default") { + if (property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find a model called "${definitionName}". 
Change its property "${propertyName}" by adding \` = ${typeof value === "string" ? `"${value}"` : value}\`.`, + path: constructJsonPath(path, key), + }; + } + } + } + + return undefined; +} + +export function handleChanged(diff: { + path: string; + oldValue: string; + newValue: string; + key: string; +}): Suggestion | undefined { + const { path, oldValue, newValue, key } = diff; + const property = getPropertyName(path); + if (key === "x-ms-client-name") { + if (property) { + const [definitionName, propertyName] = property; + return { + suggestion: `Find this TypeSpec statement @@clientName(${definitionName}.${propertyName}, "${newValue}") in file back-compatible.tsp or client.tsp. Change it to @@clientName(${definitionName}.${propertyName}, "${oldValue}")`, + path: path, + }; + } + } + return undefined; +} diff --git a/eng/tools/typespec-migration-validation/src/fix/helper.ts b/eng/tools/typespec-migration-validation/src/fix/helper.ts new file mode 100644 index 000000000000..de9b5258f93a --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/fix/helper.ts @@ -0,0 +1,199 @@ +export function checkElementAddedOrDeleted( + jsonObj: any, + currentPath: string = "", +): Array<{ path: string; value: string; key: string }> { + const results: Array<{ path: string; value: string; key: string }> = []; + + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + + const newPath = currentPath ? 
`${currentPath}.${key}` : key; + + if (key.endsWith("__deleted") || key.endsWith("__added")) { + // Store both the path and the value + results.push({ + path: currentPath, // Use parent path since we're interested in the property that has this extension + value: jsonObj[key], + key: key, + }); + } + + // If value is an object or array, recursively search it + if (jsonObj[key] && typeof jsonObj[key] === "object") { + const nestedResults = checkElementAddedOrDeleted(jsonObj[key], newPath); + results.push(...nestedResults); + } + } + + return results; +} + +export function checkElementChanged( + jsonObj: any, + currentPath: string = "", +): Array<{ path: string; oldValue: string; newValue: string; key: string }> { + const results: Array<{ path: string; oldValue: string; newValue: string; key: string }> = []; + + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + + const newPath = currentPath ? 
`${currentPath}.${key}` : key; + + if ( + typeof jsonObj[key] === "object" && + jsonObj[key]["__old"] !== undefined && + jsonObj[key]["__new"] !== undefined + ) { + // Store the path, old value and new value + results.push({ + path: currentPath, // Use parent path since we're interested in the property that has this extension + oldValue: jsonObj[key]["__old"], + newValue: jsonObj[key]["__new"], + key: key, + }); + } + + // If value is an object or array, recursively search it + if (jsonObj[key] && typeof jsonObj[key] === "object") { + const nestedResults = checkElementChanged(jsonObj[key], newPath); + results.push(...nestedResults); + } + } + + return results; +} + +export function checkPropertyAttributeDeleted( + checkKey: string, + jsonObj: any, + currentPath: string = "", +): Array<{ path: string; value: string; key: string }> { + const results: Array<{ path: string; value: string; key: string }> = []; + + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + + const newPath = currentPath ? 
`${currentPath}.${key}` : key; + + if (key === `${checkKey}__deleted`) { + // Store both the path and the value + results.push({ + path: currentPath, // Use parent path since we're interested in the property that has this extension + value: jsonObj[key], + key: key, + }); + } + + // If value is an object or array, recursively search it + if (jsonObj[key] && typeof jsonObj[key] === "object") { + const nestedResults = checkPropertyAttributeDeleted(checkKey, jsonObj[key], newPath); + results.push(...nestedResults); + } + } + + return results; +} + +export function checkPropertyAttributeAdded( + checkKey: string, + jsonObj: any, + currentPath: string = "", +): Array<{ path: string; value: string; key: string }> { + const results: Array<{ path: string; value: string; key: string }> = []; + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + const newPath = currentPath ? `${currentPath}.${key}` : key; + if (key === `${checkKey}__added`) { + // Store both the path and the value + results.push({ + path: currentPath, // Use parent path since we're interested in the property that has this extension + value: jsonObj[key], + key: key, + }); + } + + if (jsonObj[key] && typeof jsonObj[key] === "object") { + const nestedResults = checkPropertyAttributeAdded(checkKey, jsonObj[key], newPath); + results.push(...nestedResults); + } + } + return results; +} + +export function checkPropertyAttributeChanged( + checkKey: string, + jsonObj: any, + currentPath: string = "", +): Array<{ path: string; oldValue: string; newValue: string }> { + const results: Array<{ path: string; oldValue: string; newValue: string }> = []; + + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + + const newPath = currentPath ? 
`${currentPath}.${key}` : key; + + if ( + key === checkKey && + typeof jsonObj[key] === "object" && + jsonObj[key]["__old"] !== undefined && + jsonObj[key]["__new"] !== undefined + ) { + // Store the path, old value and new value + results.push({ + path: currentPath, // Use parent path since we're interested in the property that has this extension + oldValue: jsonObj[key]["__old"], + newValue: jsonObj[key]["__new"], + }); + } + + // If value is an object or array, recursively search it + if (jsonObj[key] && typeof jsonObj[key] === "object") { + const nestedResults = checkPropertyAttributeChanged(checkKey, jsonObj[key], newPath); + results.push(...nestedResults); + } + } + + return results; +} + +export function getPropertyName( + jsonPath: string, +): [definitionName: string, propertyName: string] | undefined { + const pathParts = jsonPath.split("."); + const definitionIndex = pathParts.findIndex((part) => part === "definitions"); + if (definitionIndex !== -1 && definitionIndex + 3 < pathParts.length) { + const definitionName = pathParts[definitionIndex + 1]; + const propertyName = pathParts[definitionIndex + 3]; + return [definitionName, propertyName]; + } + return undefined; +} diff --git a/eng/tools/typespec-migration-validation/src/fix/troubleshooting.ts b/eng/tools/typespec-migration-validation/src/fix/troubleshooting.ts new file mode 100644 index 000000000000..ca5af741fd33 --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/fix/troubleshooting.ts @@ -0,0 +1,39 @@ +import { jsonOutput, Suggestion } from "../jsonOutput.js"; +import { handleAdded, handleChanged, handleDeleted } from "./definition.js"; +import { checkElementAddedOrDeleted, checkElementChanged } from "./helper.js"; + +export function generatePrompts(jsonObj: any): string[] { + const suggestedFixes: Suggestion[] = []; + + const elementAddedOrDeleted = checkElementAddedOrDeleted(jsonObj); + for (const change of elementAddedOrDeleted) { + const addedSuggestion = handleAdded(change); + if 
(addedSuggestion) { + suggestedFixes.push(addedSuggestion); + } + const deletedSuggestion = handleDeleted(change); + if (deletedSuggestion) { + suggestedFixes.push(deletedSuggestion); + } + } + + const elementChanged = checkElementChanged(jsonObj); + for (const change of elementChanged) { + const changedSuggestion = handleChanged(change); + if (changedSuggestion) { + suggestedFixes.push(changedSuggestion); + } + } + + const suggestionsAsString = suggestedFixes.map((s) => s.suggestion); + if (suggestedFixes.length > 0) { + jsonOutput.suggestions.push(...suggestedFixes); + suggestionsAsString.unshift( + `You are an expert in TypeSpec. Follow the prompt exactly as written. Do not add any additional suggestions or modifications unless explicitly requested.`, + ); + for (let i = 1; i < suggestionsAsString.length; i++) { + suggestionsAsString[i] = `${i}. ${suggestionsAsString[i]}`; + } + } + return suggestionsAsString; +} diff --git a/eng/tools/typespec-migration-validation/src/helper.ts b/eng/tools/typespec-migration-validation/src/helper.ts new file mode 100644 index 000000000000..0f100d12028c --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/helper.ts @@ -0,0 +1,120 @@ +import { OpenAPI2Document } from "@azure-tools/typespec-autorest"; +import fs from "fs"; +import path from "path"; +import { logWarning } from "./log.js"; + +/** + * Reads all files in a directory recursively, excluding paths containing a specified string + * @param directoryPath The directory to read files from + * @param excludePattern String pattern to exclude from paths (case insensitive) + * @returns Array of file paths + */ +function readFilesFromDirectory( + directoryPath: string, + excludePattern: string = "example", +): string[] { + const results: string[] = []; + + function traverseDirectory(currentPath: string): void { + const files = fs.readdirSync(currentPath); + + for (const file of files) { + const filePath = path.join(currentPath, file); + const stats = 
fs.statSync(filePath); + + // Skip paths containing the exclude pattern + if (filePath.toLowerCase().includes(excludePattern.toLowerCase())) { + continue; + } + + // Skip paths that are not json files + if (!filePath.endsWith(".json")) { + continue; + } + + if (stats.isDirectory()) { + traverseDirectory(filePath); + } else { + results.push(filePath); + } + } + } + + traverseDirectory(directoryPath); + return results; +} + +/** + * Reads the contents of a file + * @param filePath Path to the file + * @returns File contents as string + */ +export function readFileContent(filePath: string): string { + return fs.readFileSync(filePath, "utf8"); +} + +export function mergeFiles(folderPath: string): OpenAPI2Document { + const files = readFilesFromDirectory(folderPath, "example"); + const mergedContent: OpenAPI2Document = { + swagger: "2.0", + info: { title: "placeholder", version: "placeholder" }, + paths: {}, + }; + + for (const file of files) { + const fileContent = readFileContent(file); + const jsonContent: OpenAPI2Document = JSON.parse(fileContent); + mergedContent.info = jsonContent.info; + + for (const consumer of jsonContent.consumes ?? []) { + if (!mergedContent.consumes) { + mergedContent.consumes = []; + } + if (!mergedContent.consumes.includes(consumer)) { + mergedContent.consumes.push(consumer); + } + } + for (const producer of jsonContent.produces ?? 
[]) { + if (!mergedContent.produces) { + mergedContent.produces = []; + } + if (!mergedContent.produces.includes(producer)) { + mergedContent.produces.push(producer); + } + } + + for (const pathKey in jsonContent.paths) { + const pathValue = jsonContent.paths[pathKey]; + if (!mergedContent.paths[pathKey]) { + mergedContent.paths[pathKey] = pathValue!; + } else { + // Merge the paths if they already exist + mergedContent.paths[pathKey] = { ...mergedContent.paths[pathKey], ...pathValue }; + } + } + + for (const parameterKey in jsonContent.parameters) { + if (!mergedContent.parameters) { + mergedContent.parameters = {}; + } + if (!mergedContent.parameters[parameterKey]) { + mergedContent.parameters[parameterKey] = jsonContent.parameters[parameterKey]!; + } else { + logWarning(`Duplicate parameter key found: ${parameterKey}. Keeping the first one.`); + } + } + + for (const definitionKey in jsonContent.definitions) { + if (!mergedContent.definitions) { + mergedContent.definitions = {}; + } + if (!mergedContent.definitions[definitionKey]) { + mergedContent.definitions[definitionKey] = jsonContent.definitions[definitionKey]!; + } else { + logWarning(`Duplicate definition key found: ${definitionKey}. 
Keeping the first one.`);
      }
    }
  }

  return mergedContent;
}
diff --git a/eng/tools/typespec-migration-validation/src/ignore.ts b/eng/tools/typespec-migration-validation/src/ignore.ts
new file mode 100644
index 000000000000..5ff5f8cc4f4e
--- /dev/null
+++ b/eng/tools/typespec-migration-validation/src/ignore.ts
@@ -0,0 +1,272 @@
/**
 * User could choose to ignore certain difference report
 */

import { OpenAPI2Document } from "@azure-tools/typespec-autorest";

// FIX: `Set` requires a type argument in TypeScript; elements are the
// ignored JSON-path strings.
const ignoreList: Set<string> = new Set();

export function addIgnorePath(path: string): void {
  ignoreList.add(path);
}

/**
 * Applies the ignore list to the two normalized documents so that ignored
 * differences disappear from the subsequent diff:
 *  - "<path>__added": the element is removed from the NEW file;
 *  - "<path>__deleted": the element is removed from the OLD file;
 *  - otherwise the OLD value is copied over the NEW value at that path.
 */
export function processIgnoreList(
  sortedOldFile: OpenAPI2Document,
  sortedNewFile: OpenAPI2Document,
): void {
  // Process each path in the ignore list
  for (const path of ignoreList) {
    if (path.endsWith("__added")) {
      const realPath = path.replace(/__added$/, "");
      // Delete the added element from the new file
      deleteElementByJsonPath(sortedNewFile, realPath);
    } else if (path.endsWith("__deleted")) {
      const realPath = path.replace(/__deleted$/, "");
      deleteElementByJsonPath(sortedOldFile, realPath);
    } else {
      const oldValue = getElementByJsonPath(sortedOldFile, path);
      if (oldValue !== undefined) {
        setElementByJsonPath(sortedNewFile, path, oldValue);
      }
    }
  }
}

/**
 * Parses a JSON path string that may contain segments with dots in bracket notation
 * Format can be either dot notation (a.b.c) or mixed with bracket notation for segments with dots (a['b.c.d'].e)
 * Also properly handles array references by keeping them with their parent segment
 * @param path The JSON path to parse
 * @returns Array of path segments
 */
function parseJsonPath(path: string): string[] {
  const segments: string[] = [];
  let currentSegment = "";
  let inBracket = false;
  let inArrayNotation = false;
  let bracketDepth = 0;

  for (let i = 0; i < path.length; i++) {
    const char = path[i];

    // Handle start of bracket notation for property access
    if ((char
=== "[" && !inBracket && path[i + 1] === "'") || path[i + 1] === '"') { + // Start of bracket notation for property with special chars + if (currentSegment) { + segments.push(currentSegment); + currentSegment = ""; + } + inBracket = true; + continue; + } + // Handle start of array index notation + else if (char === "[" && !inBracket) { + // This is an array index notation, keep it with the current segment + inArrayNotation = true; + bracketDepth++; + currentSegment += char; + continue; + } + // Handle quotes in bracket notation + else if (inBracket && (char === "'" || char === '"')) { + continue; + } + // Handle end of bracket notation for property access + else if (char === "]" && inBracket) { + // End of bracket notation + segments.push(currentSegment); + currentSegment = ""; + inBracket = false; + continue; + } + // Handle end of array index notation + else if (char === "]" && inArrayNotation) { + // End of array bracket, keep it as part of the segment + bracketDepth--; + if (bracketDepth === 0) { + inArrayNotation = false; + } + currentSegment += char; + continue; + } + // Handle dot separator + else if (char === "." 
&& !inBracket && !inArrayNotation) { + // Dot separator (only when not in bracket or array notation) + if (currentSegment) { + segments.push(currentSegment); + currentSegment = ""; + } + continue; + } + + // Regular character, add to current segment + currentSegment += char; + } + + // Add the last segment if any + if (currentSegment) { + segments.push(currentSegment); + } + + return segments; +} + +/** + * Deletes an element from an object using a parsed JSON path + * @param obj The object to modify + * @param path The path to the element to delete (e.g., "paths./users.get") + */ +function deleteElementByJsonPath(obj: any, path: string): void { + const segments = parseJsonPath(path); + + let current = obj; + + // Navigate to the parent of the element to delete + for (let i = 0; i < segments.length - 1; i++) { + const segment = segments[i]; + + // Handle array index notation [n] + const arrayMatch = segment.match(/^(.*?)\[(\d+)\]$/); + if (arrayMatch) { + const arrayName = arrayMatch[1]; + const arrayIndex = parseInt(arrayMatch[2], 10); + + if ( + !current[arrayName] || + !Array.isArray(current[arrayName]) || + arrayIndex >= current[arrayName].length + ) { + // Path doesn't exist, nothing to delete + return; + } + current = current[arrayName][arrayIndex]; + } else { + if (!current[segment] || typeof current[segment] !== "object") { + // Path doesn't exist, nothing to delete + return; + } + current = current[segment]; + } + } + + // Delete the element + const lastSegment = segments[segments.length - 1]; + + // Handle array index notation for the last part + const arrayMatch = lastSegment.match(/^(.*?)\[(\d+)\]$/); + if (arrayMatch) { + const arrayName = arrayMatch[1]; + const arrayIndex = parseInt(arrayMatch[2], 10); + + if ( + current[arrayName] && + Array.isArray(current[arrayName]) && + arrayIndex < current[arrayName].length + ) { + current[arrayName].splice(arrayIndex, 1); + } + } else { + delete current[lastSegment]; + } +} + +/** + * Gets an element from an 
object using a dot-notation JSON path + * @param obj The object to query + * @param path The path to the element (e.g., "paths./users.get") + * @returns The value at the specified path or undefined if not found + */ +function getElementByJsonPath(obj: any, path: string): any { + const parts = parseJsonPath(path); + let current = obj; + + for (const part of parts) { + // Handle array index notation + const arrayMatch = part.match(/^(.*)\[(\d+)\]$/); + if (arrayMatch) { + const arrayName = arrayMatch[1]; + const arrayIndex = parseInt(arrayMatch[2], 10); + + if ( + !current[arrayName] || + !Array.isArray(current[arrayName]) || + arrayIndex >= current[arrayName].length + ) { + return undefined; + } + current = current[arrayName][arrayIndex]; + } else { + if (current[part] === undefined) { + return undefined; + } + current = current[part]; + } + } + + return current; +} + +/** + * Sets an element in an object using a dot-notation JSON path + * @param obj The object to modify + * @param path The path where to set the element (e.g., "paths./users.get") + * @param value The value to set + */ +function setElementByJsonPath(obj: any, path: string, value: any): void { + const parts = parseJsonPath(path); + let current = obj; + + // Navigate to the parent of where we want to set the value + for (let i = 0; i < parts.length - 1; i++) { + const part = parts[i]; + + // Handle array index notation + const arrayMatch = part.match(/^(.*)\[(\d+)\]$/); + if (arrayMatch) { + const arrayName = arrayMatch[1]; + const arrayIndex = parseInt(arrayMatch[2], 10); + + // Ensure the array exists + if (!current[arrayName]) { + current[arrayName] = []; + } + + // Ensure the array is long enough + while (current[arrayName].length <= arrayIndex) { + current[arrayName].push({}); + } + + current = current[arrayName][arrayIndex]; + } else { + // Ensure the object exists + if (!current[part]) { + current[part] = {}; + } + current = current[part]; + } + } + + // Set the value + const lastPart = 
parts[parts.length - 1]; + + // Handle array index notation for the last part + const arrayMatch = lastPart.match(/^(.*)\[(\d+)\]$/); + if (arrayMatch) { + const arrayName = arrayMatch[1]; + const arrayIndex = parseInt(arrayMatch[2], 10); + + // Ensure the array exists + if (!current[arrayName]) { + current[arrayName] = []; + } + + // Ensure the array is long enough + while (current[arrayName].length <= arrayIndex) { + current[arrayName].push(undefined); + } + + current[arrayName][arrayIndex] = value; + } else { + current[lastPart] = value; + } +} diff --git a/eng/tools/typespec-migration-validation/src/index.ts b/eng/tools/typespec-migration-validation/src/index.ts new file mode 100644 index 000000000000..db2fccaaeb68 --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/index.ts @@ -0,0 +1,244 @@ +import { sortOpenAPIDocument } from "@azure-tools/typespec-autorest"; +import fs from "fs"; +import { diff } from "json-diff"; +import yargs from "yargs"; +import { hideBin } from "yargs/helpers"; +import { configuration } from "./configuration.js"; +import { processDocument } from "./document.js"; +import { generatePrompts } from "./fix/troubleshooting.js"; +import { mergeFiles, readFileContent } from "./helper.js"; +import { addIgnorePath, processIgnoreList } from "./ignore.js"; +import { jsonOutput } from "./jsonOutput.js"; +import { logHeader, logWarning } from "./log.js"; +import { + findChangedPaths, + findDifferences, + findModifiedValues, + formatChangedPathsReport, + formatDifferenceReport, + formatModifiedValuesReport, +} from "./summary.js"; + +function parseArguments() { + return yargs(hideBin(process.argv)) + .usage("Usage: $0 [options]") + .command( + "add-ignore", + "Add paths to ignore file", + (yargs) => { + return yargs + .option("path", { + alias: "p", + describe: "JSON path to ignore", + type: "string", + demandOption: true, + }) + .option("outputFolder", { + alias: "out", + describe: "Output folder containing ignore.json", + type: 
"string", + demandOption: true, + }); + }, + (argv) => { + handleAddIgnore(argv.path as string, argv.outputFolder as string); + }, + ) + .example( + "$0 --oldPath ./old-spec-folder --newPath ./new-spec-file", + "Compare two swagger specs", + ) + .example("$0 oldSpecPath newSpecPath", "Compare using positional arguments") + .example( + "$0 add-ignore --path \"paths['/api/resource'].put.parameters[0].required__added\" --outputFolder ./results", + "Add a path to ignore file", + ) + .option("oldPath", { + alias: "o", + describe: "Path to old/original Swagger specification folder", + type: "string", + }) + .option("newPath", { + alias: "n", + describe: "Path to new/updated Swagger specification file", + type: "string", + }) + .option("outputFolder", { + alias: "out", + describe: "Output folder for analysis results", + type: "string", + }) + .option("ignoreDescription", { + description: "Ignore description differences", + type: "boolean", + default: true, + }) + .option("ignorePathCase", { + description: "Set case insensitive for the segments before provider, e.g. 
resourceGroups", + type: "boolean", + }) + .option("jsonOutput", { + description: "Also output in JSON format", + type: "boolean", + }) + .check((argv) => { + // Skip validation for the add-ignore command + if (argv._[0] === "add-ignore") { + return true; + } + + const positional = argv._; + if (!argv.oldPath && positional.length > 0) { + argv.oldPath = positional[0]!.toString(); + } + if (!argv.newPath && positional.length > 1) { + argv.newPath = positional[1]!.toString(); + } + if (!argv.outputFolder && positional.length > 2) { + argv.outputFolder = positional[2]!.toString(); + } + + if (!argv.oldPath || !argv.newPath) { + throw new Error("Both oldPath and newPath are required"); + } + + // Verify paths exist + if (!fs.existsSync(argv.oldPath)) { + throw new Error(`oldPath does not exist: ${argv.oldPath}`); + } + + if (!fs.existsSync(argv.newPath)) { + throw new Error(`newPath does not exist: ${argv.newPath}`); + } + + return true; + }) + .help() + .alias("help", "h") + .parseSync(); +} + +/** + * Handles the add-ignore command by adding a new path to the ignore.json file + * @param path JSON path to ignore + * @param outputFolder Output folder containing ignore.json + */ +function handleAddIgnore(path: string, outputFolder: string) { + const ignoreFilePath = `${outputFolder}/ignore.json`; + let ignoreList: string[] = []; + + // Create output folder if it doesn't exist + if (!fs.existsSync(outputFolder)) { + fs.mkdirSync(outputFolder, { recursive: true }); + } + + // Read existing ignore file if present + if (fs.existsSync(ignoreFilePath)) { + ignoreList = JSON.parse(fs.readFileSync(ignoreFilePath, "utf-8")); + } + + // Add new path if not already present + if (!ignoreList.includes(path)) { + ignoreList.push(path); + fs.writeFileSync(ignoreFilePath, JSON.stringify(ignoreList, null, 2)); + console.log(`Added path "${path}" to ignore list in ${ignoreFilePath}`); + } else { + console.log(`Path "${path}" already exists in ignore list`); + } + + process.exit(0); +} + 
+export async function main() { + const args = parseArguments(); + + // If using add-ignore command, the command handler will exit the process + + const { oldPath, newPath, outputFolder, ignoreDescription, ignorePathCase } = args; + configuration.ignoreDescription = ignoreDescription; + if (ignorePathCase !== undefined) { + configuration.ignorePathCase = ignorePathCase; + } + + logHeader(`Processing old swagger from: ${oldPath}...`); + const mergedOldfile = mergeFiles(oldPath!); + const processedOldFile = processDocument(mergedOldfile); + const sortedOldFile = sortOpenAPIDocument(processedOldFile); + + logHeader(`Processing new swagger from: ${newPath}...`); + const newFile = readFileContent(newPath!); + const processedNewFile = processDocument(JSON.parse(newFile.toString())); + const sortedNewFile = sortOpenAPIDocument(processedNewFile); + + logHeader("Comparing old and new Swagger files..."); + if (outputFolder) { + const ignoreFilePath = `${outputFolder}/ignore.json`; + if (fs.existsSync(ignoreFilePath)) { + logHeader(`Processing ignore file...`); + const ignoreFileContent = JSON.parse(fs.readFileSync(ignoreFilePath, "utf-8")); + for (const path of ignoreFileContent) { + addIgnorePath(path); + } + + processIgnoreList(sortedOldFile, sortedNewFile); + } + + fs.writeFileSync( + `${outputFolder}/oldNormalizedSwagger.json`, + JSON.stringify(sortedOldFile, null, 2), + ); + fs.writeFileSync( + `${outputFolder}/newNormalizedSwagger.json`, + JSON.stringify(sortedNewFile, null, 2), + ); + } + + let report: string = ""; + const diffForFile = diff(sortedOldFile, sortedNewFile); + + // // TO-DELETE: Read the diff file from disk + // const diffForFile = JSON.parse(fs.readFileSync(`C:/Users/pashao/GIT/azure-rest-api-specs/specification/agrifood/validation-results/diff.json`, 'utf-8')); + + const changedPaths = findChangedPaths(diffForFile); + if (changedPaths.length > 0) { + logWarning( + `Found ${changedPaths.length} changed paths in the diff. 
If it is just case change and you confirm it is expected, run tsmv with --ignorePathCase option to ignore case changes.`, + ); + const changedPathsReport = formatChangedPathsReport(changedPaths); + console.log(changedPathsReport); + report += changedPathsReport; + } + + const differences = findDifferences(diffForFile); + const differencesReport = formatDifferenceReport(differences); + console.log(differencesReport); + report += differencesReport; + + const modifiedValues = findModifiedValues(diffForFile); + const modifiedValuesReport = formatModifiedValuesReport(modifiedValues); + console.log(modifiedValuesReport); + report += modifiedValuesReport; + + if (outputFolder) { + fs.writeFileSync(`${outputFolder}/diff.json`, JSON.stringify(diffForFile, null, 2)); + fs.writeFileSync(`${outputFolder}/API_CHANGES.md`, report); + logHeader(`Difference report written to ${outputFolder}/API_CHANGES.md`); + + const suggestedPrompt = generatePrompts(diffForFile); + if (suggestedPrompt.length > 0) { + logWarning(`Considering these suggested prompts for the diff:`); + suggestedPrompt.forEach((prompt) => { + console.log(prompt); + }); + } + if (args.jsonOutput) { + fs.writeFileSync(`${outputFolder}/tsmv_output.json`, JSON.stringify(jsonOutput, null, 2)); + logHeader(`JSON output written to ${outputFolder}/tsmv_output.json`); + console.log( + `---- Start of Json Output ----\n${JSON.stringify(jsonOutput, null, 2)}\n---- End of Json Output ----`, + ); + } + } else { + console.log(report); + } +} diff --git a/eng/tools/typespec-migration-validation/src/jsonOutput.ts b/eng/tools/typespec-migration-validation/src/jsonOutput.ts new file mode 100644 index 000000000000..6c2877421c77 --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/jsonOutput.ts @@ -0,0 +1,40 @@ +interface ChangeBase { + category: string; + path: string; +} + +export interface ValueChange extends ChangeBase { + category: "value-changed"; + oldValue: string; + newValue: string; +} + +export interface 
AddDeleteChange extends ChangeBase { + category: "added-or-deleted"; + key: string; + type: "added" | "deleted"; + value: string; +} + +export interface PathChange extends ChangeBase { + category: "path-changed"; + type: "added" | "deleted"; +} + +export type Change = ValueChange | AddDeleteChange | PathChange; + +export interface Suggestion { + suggestion: string; + path?: string; +} +export interface JsonOutput { + version: "1.0.0"; + suggestions: Suggestion[]; + apiChanges: Change[]; +} + +export const jsonOutput: JsonOutput = { + version: "1.0.0", + suggestions: [], + apiChanges: [], +}; diff --git a/eng/tools/typespec-migration-validation/src/log.ts b/eng/tools/typespec-migration-validation/src/log.ts new file mode 100644 index 000000000000..5b437a8ddf2a --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/log.ts @@ -0,0 +1,38 @@ +// Add this at the top of your file +const colors = { + reset: "\x1b[0m", + bright: "\x1b[1m", + dim: "\x1b[2m", + underscore: "\x1b[4m", + red: "\x1b[31m", + green: "\x1b[32m", + yellow: "\x1b[33m", + blue: "\x1b[34m", + magenta: "\x1b[35m", + cyan: "\x1b[36m", + white: "\x1b[37m", + success: "\x1b[32m✓\x1b[0m", // Green checkmark + warning: "\x1b[33m⚠\x1b[0m", // Yellow warning + error: "\x1b[31m✗\x1b[0m", // Red X +}; + +// Helper functions for colored logging +export function logSuccess(message: string): void { + console.log(`${colors.green}${message}${colors.reset}`); +} + +export function logInfo(message: string): void { + console.log(`${colors.blue}${message}${colors.reset}`); +} + +export function logWarning(message: string): void { + console.log(`${colors.yellow}${message}${colors.reset}`); +} + +export function logError(message: string): void { + console.error(`${colors.red}${message}${colors.reset}`); +} + +export function logHeader(message: string): void { + console.log(`\n${colors.cyan}${colors.bright}${message}${colors.reset}`); +} diff --git a/eng/tools/typespec-migration-validation/src/parameter.ts 
b/eng/tools/typespec-migration-validation/src/parameter.ts
new file mode 100644
index 000000000000..bc15d4bfd013
--- /dev/null
+++ b/eng/tools/typespec-migration-validation/src/parameter.ts
@@ -0,0 +1,76 @@
const apiVersionAlias: string[] = ["api-version", "apiVersion", "apiVersionParameter"];

/**
 * Returns true when the given swagger parameter object (or $ref to one)
 * denotes the api-version parameter, either via the common-types reference
 * or via a well-known alias name.
 */
// FIX: `Record` requires type arguments in TypeScript.
export function isApiVersionParameter(obj: Record<string, any>) {
  if (obj["$ref"] !== undefined) {
    // FIX: escape the dot in "types\.json" for consistency with the
    // sibling patterns below (an unescaped "." matches any character).
    const commonTypePattern =
      /^\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/common-types\/resource-management\/v[1-6]\/types\.json#\/parameters\/ApiVersionParameter$/;
    if (commonTypePattern.test(obj["$ref"])) return true;

    if (
      apiVersionAlias
        .map((a) => `#/parameters/${a}`.toLowerCase())
        .filter((a) => (obj["$ref"] as string).toLowerCase().includes(a)).length > 0
    )
      return true;
  } else if (obj["name"] !== undefined) {
    if (apiVersionAlias.map((a) => a.toLowerCase()).includes(obj["name"].toLowerCase()))
      return true;
  }

  return false;
}

const subscriptionIdAlias: string[] = [
  "subscription-id",
  "subscriptionId",
  "subscriptionIdParameter",
];

/**
 * Returns true when the given swagger parameter object (or $ref to one)
 * denotes the subscription-id parameter.
 */
export function isSubscriptionIdParameter(obj: Record<string, any>) {
  if (obj["$ref"] !== undefined) {
    const commonTypePattern =
      /^\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/common-types\/resource-management\/v[1-6]\/types\.json#\/parameters\/SubscriptionIdParameter$/;
    if (commonTypePattern.test(obj["$ref"])) return true;

    if (
      subscriptionIdAlias
        .map((a) => `#/parameters/${a}`.toLowerCase())
        .filter((a) => (obj["$ref"] as string).toLowerCase().includes(a)).length > 0
    )
      return true;
  } else if (obj["name"] !== undefined) {
    if (subscriptionIdAlias.map((a) => a.toLowerCase()).includes(obj["name"].toLowerCase()))
      return true;
  }

  return false;
}

const resourceGroupNameAlias: string[] = [
  "resource-group-name",
  "resourceGroupName",
  "resourceGroupNameParameter",
  "resource-group",
  "resourceGroup",
  "resourceGroupParameter",
];

/**
 * Returns true when the given swagger parameter object (or $ref to one)
 * denotes the resource-group-name parameter.
 */
export function isResourceGroupNameParameter(obj: Record<string, any>) {
  if (obj["$ref"] !==
undefined) { + const commonTypePattern = + /^\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/common-types\/resource-management\/v[1-6]\/types\.json#\/parameters\/ResourceGroupNameParameter$/; + if (commonTypePattern.test(obj["$ref"])) return true; + + if ( + resourceGroupNameAlias + .map((a) => `#/parameters/${a}`.toLowerCase()) + .filter((a) => (obj["$ref"] as string).toLowerCase().includes(a)).length > 0 + ) + return true; + } else if (obj["name"] !== undefined) { + if (resourceGroupNameAlias.map((a) => a.toLowerCase()).includes(obj["name"].toLowerCase())) + return true; + } + + return false; +} diff --git a/eng/tools/typespec-migration-validation/src/summary.ts b/eng/tools/typespec-migration-validation/src/summary.ts new file mode 100644 index 000000000000..aafd9710b4f2 --- /dev/null +++ b/eng/tools/typespec-migration-validation/src/summary.ts @@ -0,0 +1,371 @@ +import { jsonOutput } from "./jsonOutput.js"; + +/** + * Interface representing a value and the set of JSON paths where it appears + */ +export interface ValueChangeInfo { + /** Map of JSON paths to their values where this key was found */ + paths: Map; +} + +/** + * Information about a path that was added or deleted + */ +export interface PathChangeInfo { + /** The path that was changed */ + path: string; + /** The type of change (added or deleted) */ + changeType: "added" | "deleted"; +} + +/** + * Information about a value that has been modified with old and new values + */ +export interface ModifiedValueInfo { + /** The path to the modified value */ + path: string; + /** The old value */ + oldValue: any; + /** The new value */ + newValue: any; +} + +/** + * Finds all keys in a JSON object that end with "__added" or "__deleted" and returns a mapping + * of keys to their paths + * @param jsonObj The JSON object to search + * @param currentPath The current path in the JSON object (used for recursion) + * @returns A Map with keys as result keys and values as sets of JSON paths + */ +export function findDifferences( + 
jsonObj: any, + currentPath: string = "", +): Map { + const results = new Map(); + + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + + // Process the current object + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + + // Use bracket notation for keys with dots + const newPath = constructJsonPath(currentPath, key); + const value = jsonObj[key]; + + if (key.endsWith("__added") || key.endsWith("__deleted")) { + if (!results.has(key)) { + results.set(key, { + paths: new Map(), + }); + } + + results.get(key)?.paths.set(newPath, value); + } + + if (value !== null && typeof value === "object") { + if (Array.isArray(value)) { + // Handle arrays + value.forEach((item, index) => { + if (item !== null && typeof item === "object") { + const arrayItemPath = `${newPath}[${index}]`; + const nestedResults = findDifferences(item, arrayItemPath); + + // Merge nested results + nestedResults.forEach((ValueChangeInfo, nestedKey) => { + if (!results.has(nestedKey)) { + results.set(nestedKey, { + paths: new Map(), + }); + } + + ValueChangeInfo.paths.forEach((pathValue, path) => { + results.get(nestedKey)?.paths.set(path, pathValue); + }); + }); + } + }); + } else { + // Handle regular objects + const nestedResults = findDifferences(value, newPath); + + // Merge nested results + nestedResults.forEach((ValueChangeInfo, nestedKey) => { + if (!results.has(nestedKey)) { + results.set(nestedKey, { + paths: new Map(), + }); + } + + ValueChangeInfo.paths.forEach((pathValue, path) => { + results.get(nestedKey)?.paths.set(path, pathValue); + }); + }); + } + } + } + + return results; +} + +/** + * Constructs a proper JSON path, using bracket notation for segments containing dots + * and removes integer parts that follow array notation + * @param currentPath Current path being processed + * @param key The key to append to the path + * @returns A properly formatted JSON path string + */ +export function 
constructJsonPath(currentPath: string, key: string): string { + // Check if the key is an integer following an array notation + const isArrayIndex = /^\d+$/.test(key) && currentPath.endsWith("]"); + + // If this is an array index, we don't add it to the path + if (isArrayIndex) { + return currentPath; + } + + // Check if key contains dots or special characters requiring bracket notation + const needsBrackets = key.includes(".") || key.includes(" ") || key.includes("-"); + + if (currentPath === "") { + return needsBrackets ? `['${key.replace(/\\/g, "\\\\").replace(/'/g, "\\'")}']` : key; + } + + return needsBrackets + ? `${currentPath}['${key.replace(/\\/g, "\\\\").replace(/'/g, "\\'")}']` + : `${currentPath}.${key}`; +} + +/** + * Finds paths in the jsonObject["paths"] property that end with __added or __deleted + * and returns them sorted alphabetically (case insensitive) + * + * @param jsonObject The JSON object containing a "paths" property to search + * @returns Array of PathChangeInfo objects sorted alphabetically by path + */ +export function findChangedPaths(jsonObject: any): PathChangeInfo[] { + const results: PathChangeInfo[] = []; + + // Check if jsonObject and paths property exist + if (!jsonObject || !jsonObject.paths || typeof jsonObject.paths !== "object") { + return results; + } + + // Iterate through all keys in the paths object + for (const pathKey in jsonObject.paths) { + if (!Object.prototype.hasOwnProperty.call(jsonObject.paths, pathKey)) { + continue; + } + + // Check if the key ends with __added or __deleted + if (pathKey.endsWith("__added")) { + results.push({ + path: pathKey.replace(/__added$/, ""), + changeType: "added", + }); + } else if (pathKey.endsWith("__deleted")) { + results.push({ + path: pathKey.replace(/__deleted$/, ""), + changeType: "deleted", + }); + } + } + + // Sort results alphabetically by path (case insensitive) + return results.sort((a, b) => a.path.toLowerCase().localeCompare(b.path.toLowerCase())); +} + +/** + * Returns 
a formatted Markdown table representation of the added/deleted keys for reporting in README files + * Organizes output by key, with each key having its own table of values + * @param keyPathsMap Map of keys to paths from findDifferences + * @returns A formatted Markdown with tables organized by keys + */ +export function formatDifferenceReport(keyPathsMap: Map): string { + if (keyPathsMap.size === 0) { + return ""; + } + + let report = "## Swagger Changes\n\n"; + + // Group by keys + const keyGroups: Map> = new Map(); + + keyPathsMap.forEach((valueChangeInfo, key) => { + const baseKey = key.replace(/__added$|__deleted$/, ""); + const changeType = key.endsWith("__added") ? "added" : "deleted"; + + if (!keyGroups.has(baseKey)) { + keyGroups.set(baseKey, new Map()); + } + + valueChangeInfo.paths.forEach((value, path) => { + keyGroups.get(baseKey)?.set(path, [changeType, value]); + }); + }); + + // Generate a table for each key + keyGroups.forEach((pathsMap, key) => { + report += `### Changes for \`${key}\`\n\n`; + report += "| Path | Change Type | Value |\n"; + report += "|------|------------|-------|\n"; + + // Sort paths alphabetically + const sortedPaths = Array.from(pathsMap.keys()).sort((a, b) => + a.toLowerCase().localeCompare(b.toLowerCase()), + ); + + sortedPaths.forEach((path) => { + const [changeType, value] = pathsMap.get(path)!; + const formattedValue = + typeof value === "object" + ? JSON.stringify(value).substring(0, 100) + + (JSON.stringify(value).length > 100 ? "..." 
: "") + : String(value); + + report += `| \`${path}\` | ${changeType} | \`${formattedValue.replace(/\\/g, "\\\\").replace(/\|/g, "\\|")}\` |\n`; + jsonOutput.apiChanges.push({ + category: "added-or-deleted", + key: key, + path: path, + type: changeType, + value: formattedValue, + }); + }); + + report += "\n"; + }); + + return report; +} + +/** + * Returns a formatted string representation of the changed paths + * @param changedPaths Array of PathChangeInfo objects from findChangedPaths + * @returns A formatted string with paths and their change types + */ +export function formatChangedPathsReport(changedPaths: PathChangeInfo[]): string { + let report = ""; + if (changedPaths.length === 0) return report; + + report += "## Changed Paths\n\n"; + changedPaths.forEach(({ path, changeType }) => { + report += `Path: ${path}\nChange Type: ${changeType}\n\n`; + jsonOutput.apiChanges.push({ + category: "path-changed", + path: path, + type: changeType, + }); + }); + return report; +} + +/** + * Finds all pairs of keys with "__old" and "__new" suffixes in the JSON object + * @param jsonObj The JSON object to search + * @param currentPath The current path in the JSON object (used for recursion) + * @returns Array of ModifiedValueInfo objects containing paths and old/new values + */ +export function findModifiedValues(jsonObj: any, currentPath: string = ""): ModifiedValueInfo[] { + const oldValues = new Map(); + const newValues = new Map(); + const results: ModifiedValueInfo[] = []; + + if (!jsonObj || typeof jsonObj !== "object") { + return results; + } + + // Process the current level + for (const key in jsonObj) { + if (!Object.prototype.hasOwnProperty.call(jsonObj, key)) { + continue; + } + + const value = jsonObj[key]; + const newPath = constructJsonPath(currentPath, key); + + // Check for __old and __new keys + if (key.endsWith("__old")) { + const basePath = newPath.replace(/.__old$/, ""); + oldValues.set(basePath, { path: newPath, value }); + } else if 
(key.endsWith("__new")) { + const basePath = newPath.replace(/.__new$/, ""); + newValues.set(basePath, { path: newPath, value }); + } + + // Recursively process objects and arrays + if (value !== null && typeof value === "object") { + if (Array.isArray(value)) { + value.forEach((item, index) => { + if (item !== null && typeof item === "object") { + const arrayItemPath = `${newPath}[${index}]`; + results.push(...findModifiedValues(item, arrayItemPath)); + } + }); + } else { + results.push(...findModifiedValues(value, newPath)); + } + } + } + + // Match old and new value pairs + for (const [basePath, oldInfo] of oldValues.entries()) { + const newInfo = newValues.get(basePath); + if (newInfo) { + results.push({ + path: basePath, + oldValue: oldInfo.value, + newValue: newInfo.value, + }); + } + } + + return results; +} + +/** + * Returns a formatted Markdown table of the modified values + * @param modifiedValues Array of ModifiedValueInfo objects + * @returns A formatted Markdown table with old and new values + */ +export function formatModifiedValuesReport(modifiedValues: ModifiedValueInfo[]): string { + if (modifiedValues.length === 0) { + return ""; + } + + let report = "## Modified Values\n\n"; + report += "| Path | Old Value | New Value |\n"; + report += "|------|-----------|----------|\n"; + + modifiedValues.sort((a, b) => a.path.toLowerCase().localeCompare(b.path.toLowerCase())); + + modifiedValues.forEach(({ path, oldValue, newValue }) => { + const formatValue = (value: any): string => { + if (value === undefined) return "`undefined`"; + if (value === null) return "`null`"; + + const valueStr = + typeof value === "object" + ? JSON.stringify(value).substring(0, 100) + + (JSON.stringify(value).length > 100 ? "..." 
: "") + : String(value); + + return `\`${valueStr.replace(/\\/g, "\\\\").replace(/\|/g, "\\|").replace(/`/g, "\\`")}\``; + }; + + report += `| \`${path}\` | ${formatValue(oldValue)} | ${formatValue(newValue)} |\n`; + jsonOutput.apiChanges.push({ + category: "value-changed", + path: path, + oldValue: formatValue(oldValue), + newValue: formatValue(newValue), + }); + }); + + report += "\n"; + return report; +} diff --git a/eng/tools/typespec-migration-validation/tsconfig.json b/eng/tools/typespec-migration-validation/tsconfig.json new file mode 100644 index 000000000000..512241b97047 --- /dev/null +++ b/eng/tools/typespec-migration-validation/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "allowJs": true, + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], + "references": [{ "path": "../suppressions" }], +} diff --git a/eng/tools/typespec-requirement/package.json b/eng/tools/typespec-requirement/package.json index 32a71505a6cd..1e49e720bf91 100644 --- a/eng/tools/typespec-requirement/package.json +++ b/eng/tools/typespec-requirement/package.json @@ -5,11 +5,15 @@ "devDependencies": { "@types/node": "^20.0.0", "execa": "^9.3.0", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, "scripts": { "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . 
--ignore-path ../.prettierignore --check --log-level debug", "test": "vitest", "test:ci": "vitest run --reporter=verbose" }, diff --git a/eng/tools/typespec-requirement/test/typespec-requirement.test.ts b/eng/tools/typespec-requirement/test/typespec-requirement.test.ts index e4872dcef5c9..80baa79f241f 100644 --- a/eng/tools/typespec-requirement/test/typespec-requirement.test.ts +++ b/eng/tools/typespec-requirement/test/typespec-requirement.test.ts @@ -11,7 +11,12 @@ async function checkAllUnder(path: string, responseCache?: string) { command += ` -_ResponseCache ${responseCache}`; } - return await execa("pwsh", ["-Command", command], { cwd: repoRoot, reject: false }); + const result = await execa("pwsh", ["-Command", command], { cwd: repoRoot, reject: false }); + return { + // Merge stdout and stderr, since script writes to stdout in CI but stderr on dev machine + stdout: result.stdout + result.stderr, + exitCode: result.exitCode, + }; } test.concurrent("No files to check", async ({ expect }) => { @@ -36,9 +41,9 @@ test.concurrent("Parse error", async ({ expect }) => { }); test.concurrent("No tspconfig.yaml", async ({ expect }) => { - const { stderr, exitCode } = await checkAllUnder("specification/no-tspconfig"); + const { stdout, exitCode } = await checkAllUnder("specification/no-tspconfig"); - expect(stderr).toContain("no files named 'tspconfig.yaml'"); + expect(stdout).toContain("no files named 'tspconfig.yaml'"); expect(exitCode).toBe(1); }); @@ -74,12 +79,12 @@ test.concurrent("Hand-written, does not exist in main", async ({ expect }) => { }); test.concurrent("Hand-written, unexpected response checking main", async ({ expect }) => { - const { stdout, stderr, exitCode } = await checkAllUnder( + const { stdout, exitCode } = await checkAllUnder( "specification/hand-written", '@{"https://github.com/Azure/azure-rest-api-specs/tree/main/specification/hand-written/resource-manager/Microsoft.HandWritten/stable"=519}', ); expect(stdout).toContain("was not 
generated from TypeSpec"); - expect(stderr).toContain("Unexpected response"); + expect(stdout).toContain("Unexpected response"); expect(exitCode).toBe(1); }); diff --git a/eng/tools/typespec-requirement/tsconfig.json b/eng/tools/typespec-requirement/tsconfig.json index ec6d6640928a..1c9d0b24bed9 100644 --- a/eng/tools/typespec-requirement/tsconfig.json +++ b/eng/tools/typespec-requirement/tsconfig.json @@ -2,5 +2,7 @@ "extends": "../tsconfig.json", "compilerOptions": { "outDir": "./dist", - } + "rootDir": ".", + }, + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], } diff --git a/eng/tools/typespec-requirement/vite.config.ts b/eng/tools/typespec-requirement/vite.config.ts index 8b6908dc0a16..346b4e424284 100644 --- a/eng/tools/typespec-requirement/vite.config.ts +++ b/eng/tools/typespec-requirement/vite.config.ts @@ -1,8 +1,8 @@ -import { defineConfig } from 'vite' +import { defineConfig } from "vite"; export default defineConfig({ test: { // Default timeout of 5 seconds is too low - testTimeout: 20000 - } -}) + testTimeout: 20000, + }, +}); diff --git a/eng/tools/typespec-validation/cmd/tsv.js b/eng/tools/typespec-validation/cmd/tsv.js index 2a7c28a5d607..e2a37e0b5491 100755 --- a/eng/tools/typespec-validation/cmd/tsv.js +++ b/eng/tools/typespec-validation/cmd/tsv.js @@ -1,5 +1,5 @@ #!/usr/bin/env node -import { main } from "../dist/src/index.js" +import { main } from "../dist/src/index.js"; await main(); diff --git a/eng/tools/typespec-validation/package.json b/eng/tools/typespec-validation/package.json index ac9dc8159a3a..4da6c6a28298 100644 --- a/eng/tools/typespec-validation/package.json +++ b/eng/tools/typespec-validation/package.json @@ -7,19 +7,28 @@ "tsv": "cmd/tsv.js" }, "dependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", + "debug": "^4.4.0", "globby": "^14.0.1", + "picocolors": "^1.1.1", "simple-git": "^3.24.0", "suppressions": "file:../suppressions", + "strip-ansi": "^7.1.0", "yaml": "^2.4.2" }, "devDependencies": { + 
"@types/debug": "^4.1.12", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, "scripts": { "build": "tsc --build", + "format": "prettier . --ignore-path ../.prettierignore --write", + "format:check": "prettier . --ignore-path ../.prettierignore --check", + "format:check:ci": "prettier . --ignore-path ../.prettierignore --check --log-level debug", "test": "vitest", "test:ci": "vitest run --coverage --reporter=verbose" }, diff --git a/eng/tools/typespec-validation/src/index.ts b/eng/tools/typespec-validation/src/index.ts index af72393153ef..ce5e738a93f1 100755 --- a/eng/tools/typespec-validation/src/index.ts +++ b/eng/tools/typespec-validation/src/index.ts @@ -1,3 +1,4 @@ +import { stat } from "node:fs/promises"; import { ParseArgsConfig, parseArgs } from "node:util"; import { Suppression } from "suppressions"; import { CompileRule } from "./rules/compile.js"; @@ -8,32 +9,43 @@ import { FormatRule } from "./rules/format.js"; import { LinterRulesetRule } from "./rules/linter-ruleset.js"; import { NpmPrefixRule } from "./rules/npm-prefix.js"; import { SdkTspConfigValidationRule } from "./rules/sdk-tspconfig-validation.js"; -import { TsvRunnerHost } from "./tsv-runner-host.js"; +import { fileExists, getSuppressions, normalizePath } from "./utils.js"; + +// Context argument may add new properties or override checkingAllSpecs +export var context: Record = { checkingAllSpecs: false }; export async function main() { - const host = new TsvRunnerHost(); const args = process.argv.slice(2); const options = { folder: { type: "string", short: "f", }, + context: { + type: "string", + short: "c", + }, }; const parsedArgs = parseArgs({ args, options, allowPositionals: true } as ParseArgsConfig); const folder = parsedArgs.positionals[0]; - const absolutePath = host.normalizePath(folder); - if (!(await host.checkFileExists(absolutePath))) { + if (parsedArgs.positionals[1]) { + context = { ...context, 
...JSON.parse(parsedArgs.positionals[1]) }; + } + + const absolutePath = normalizePath(folder); + + if (!(await fileExists(absolutePath))) { console.log(`Folder ${absolutePath} does not exist`); process.exit(1); } - if (!(await host.isDirectory(absolutePath))) { + if (!(await stat(absolutePath)).isDirectory()) { console.log(`Please run TypeSpec Validation on a directory path`); process.exit(1); } console.log("Running TypeSpecValidation on folder: ", absolutePath); - const suppressions: Suppression[] = await host.getSuppressions(absolutePath); + const suppressions: Suppression[] = await getSuppressions(absolutePath); // Suppressions for the whole tool must have no rules or sub-rules const toolSuppressions = suppressions.filter((s) => !s.rules?.length && !s.subRules?.length); @@ -58,7 +70,7 @@ export async function main() { for (let i = 0; i < rules.length; i++) { const rule = rules[i]; console.log("\nExecuting rule: " + rule.name); - const result = await rule.execute(host, absolutePath); + const result = await rule.execute(absolutePath); if (result.stdOutput) console.log(result.stdOutput); if (!result.success) { success = false; diff --git a/eng/tools/typespec-validation/src/rule.ts b/eng/tools/typespec-validation/src/rule.ts index 64b26927c53a..dd909c738654 100644 --- a/eng/tools/typespec-validation/src/rule.ts +++ b/eng/tools/typespec-validation/src/rule.ts @@ -1,5 +1,4 @@ import { RuleResult } from "./rule-result.js"; -import { TsvHost } from "./tsv-host.js"; export interface Rule { readonly name: string; @@ -8,5 +7,5 @@ export interface Rule { readonly action?: string; // TODO: required when all rules apply it readonly link?: string; - execute(host: TsvHost, folder: string): Promise; + execute(folder: string): Promise; } diff --git a/eng/tools/typespec-validation/src/rules/compile.ts b/eng/tools/typespec-validation/src/rules/compile.ts index 3e472075778e..ad01b239c359 100644 --- a/eng/tools/typespec-validation/src/rules/compile.ts +++ 
b/eng/tools/typespec-validation/src/rules/compile.ts @@ -1,21 +1,39 @@ -import path from "path"; -import { Rule } from "../rule.js"; +import { filterAsync } from "@azure-tools/specs-shared/array"; +import { readFile } from "fs/promises"; +import { globby } from "globby"; +import path, { basename, dirname, normalize } from "path"; +import pc from "picocolors"; +import stripAnsi from "strip-ansi"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { fileExists, getSuppressions, gitDiffTopSpecFolder, runNpm } from "../utils.js"; export class CompileRule implements Rule { readonly name = "Compile"; readonly description = "Compile TypeSpec"; - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { let success = true; let stdOutput = ""; let errorOutput = ""; - if (await host.checkFileExists(path.join(folder, "main.tsp"))) { - let [err, stdout, stderr] = await host.runCmd( - `npm exec --no -- tsp compile --warn-as-error ${folder}`, - ); + if (await fileExists(path.join(folder, "main.tsp"))) { + let [err, stdout, stderr] = await runNpm([ + "exec", + "--no", + "--", + "tsp", + "compile", + "--list-files", + "--warn-as-error", + folder, + ]); + + stdOutput += stdout; + + // Rule output is easier to read if "tsp compile" stderr is redirected to stdOutput + stdOutput += stderr; + if ( stdout.toLowerCase().includes("no emitter was configured") || stdout.toLowerCase().includes("no output was generated") @@ -23,19 +41,153 @@ export class CompileRule implements Rule { success = false; errorOutput += "No emitter was configured and/or no output was generated."; } - if (err) { - success = false; - errorOutput += err.message; + + if (success) { + if (!err) { + // Check for *extra* typespec-generated swagger files under the output folder, which + // indicates a mismatch between TypeSpec and swaggers. 
+ + // Example 'stdout': + // + // TypeSpec compiler v0.67.2 + // + // ../resource-manager/Microsoft.Contoso/stable/2021-11-01/contoso.json + // ../resource-manager/Microsoft.Contoso/stable/2021-11-01/examples/Operations_List.json + // + // Compilation completed successfully. + + // Remove ANSI color codes, handle windows and linux line endings + const lines = stripAnsi(stdout).split(/\r?\n/); + + // TODO: Use helpers in /.github once they support platform-specific paths + // Header, footer, and empty lines should be excluded by JSON filter + const outputSwaggers = lines + // Remove leading and trailing whitespace + .map((l) => l.trim()) + // Normalize to platform-specific path + .map((l) => normalize(l)) + // Filter to JSON files + .filter((p) => basename(p).toLowerCase().endsWith(".json")) + // Exclude examples + .filter((p) => !p.split(path.sep).includes("examples")); + + stdOutput += "\nGenerated Swaggers:\n"; + stdOutput += outputSwaggers.join("\n") + "\n"; + + if (outputSwaggers.length === 0) { + throw new Error("No generated swaggers found in output of 'tsp compile'"); + } + + // ../resource-manager/Microsoft.Contoso + const outputFolder = dirname(dirname(dirname(outputSwaggers[0]))); + const outputFilename = basename(outputSwaggers[0]); + + stdOutput += "\nOutput folder:\n"; + stdOutput += outputFolder + "\n"; + + // Filter to only specs matching the folder and filename extracted from the first output-file. + // Necessary to handle multi-project specs like keyvault. + // + // Globby only accepts patterns like posix paths. 
+ const pattern = path.posix.join( + ...outputFolder.split(path.win32.sep), + "**", + outputFilename, + ); + const allSwaggers = (await globby(pattern, { ignore: ["**/examples/**"] })).map( + // Globby always returns posix paths + (p) => normalize(p), + ); + + // Filter to files generated by TypeSpec + const tspGeneratedSwaggers = await filterAsync( + allSwaggers, + async (swaggerPath: string) => { + const swaggerText = await readFile(swaggerPath, { encoding: "utf8" }); + const swaggerObj = JSON.parse(swaggerText); + return ( + swaggerObj["info"]?.["x-typespec-generated"] || + swaggerObj["info"]?.["x-cadl-generated"] + ); + }, + ); + + stdOutput += `\nSwaggers matching output folder and filename:\n`; + stdOutput += tspGeneratedSwaggers.join("\n") + "\n"; + + const suppressedSwaggers = await filterAsync( + tspGeneratedSwaggers, + async (swaggerPath: string) => { + const suppressions = await getSuppressions(swaggerPath); + + const extraSwaggerSuppressions = suppressions.filter( + (s) => s.rules?.includes(this.name) && s.subRules?.includes("ExtraSwagger"), + ); + + // Each path must specify a single version (without wildcards) under "preview|stable" + // + // Allowed: data-plane/Azure.Contoso.WidgetManager/preview/2022-11-01-preview/**/*.json + // Disallowed: data-plane/Azure.Contoso.WidgetManager/preview/**/*.json + // Disallowed: data-plane/**/*.json + // + // Include "." since a few specs use versions like "X.Y" instead of "YYYY-MM-DD" + const singleVersionPattern = "/(preview|stable)/[A-Za-z0-9._-]+/"; + + for (const suppression of extraSwaggerSuppressions) { + for (const p of suppression.paths) { + if (!p.match(singleVersionPattern)) { + throw new Error( + `Invalid path '${p}'. 
Path must only include one version per suppression.`, + ); + } + } + } + + return extraSwaggerSuppressions.length > 0; + }, + ); + + stdOutput += `\nSwaggers excluded via suppressions.yaml:\n`; + stdOutput += suppressedSwaggers.join("\n") + "\n"; + + const remainingSwaggers = tspGeneratedSwaggers.filter( + (s) => !suppressedSwaggers.includes(s), + ); + + stdOutput += `\nRemaining swaggers:\n`; + stdOutput += remainingSwaggers.join("\n") + "\n"; + + const extraSwaggers = remainingSwaggers.filter((s) => !outputSwaggers.includes(s)); + + if (extraSwaggers.length > 0) { + success = false; + errorOutput += pc.red( + `\nOutput folder '${outputFolder}' appears to contain TypeSpec-generated ` + + `swagger files, not generated from the current TypeSpec sources. ` + + `Perhaps you deleted a version from your TypeSpec, but didn't delete ` + + `the associated swaggers?\n\n`, + ); + errorOutput += pc.red(extraSwaggers.join("\n") + "\n"); + } + } else { + success = false; + errorOutput += err.message; + } } - stdOutput += stdout; - errorOutput += stderr; } const clientTsp = path.join(folder, "client.tsp"); - if (await host.checkFileExists(clientTsp)) { - let [err, stdout, stderr] = await host.runCmd( - `npm exec --no -- tsp compile --no-emit --warn-as-error ${clientTsp}`, - ); + if (await fileExists(clientTsp)) { + let [err, stdout, stderr] = await runNpm([ + "exec", + "--no", + "--", + "tsp", + "compile", + "--no-emit", + "--warn-as-error", + clientTsp, + ]); if (err) { success = false; errorOutput += err.message; @@ -45,7 +197,7 @@ export class CompileRule implements Rule { } if (success) { - const gitDiffResult = await host.gitDiffTopSpecFolder(host, folder); + const gitDiffResult = await gitDiffTopSpecFolder(folder); stdOutput += gitDiffResult.stdOutput; if (!gitDiffResult.success) { success = false; @@ -53,6 +205,7 @@ export class CompileRule implements Rule { errorOutput += `\nFiles have been changed after \`tsp compile\`. 
Run \`tsp compile\` and ensure all files are included in your change.`; } } + return { success: success, stdOutput: stdOutput, diff --git a/eng/tools/typespec-validation/src/rules/emit-autorest.ts b/eng/tools/typespec-validation/src/rules/emit-autorest.ts index 6e3e0882cb2d..c195d101412b 100644 --- a/eng/tools/typespec-validation/src/rules/emit-autorest.ts +++ b/eng/tools/typespec-validation/src/rules/emit-autorest.ts @@ -1,24 +1,24 @@ import { join } from "path"; import { parse as yamlParse } from "yaml"; -import { Rule } from "../rule.js"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { fileExists, readTspConfig } from "../utils.js"; export class EmitAutorestRule implements Rule { readonly name = "EmitAutorest"; readonly description = 'Must emit "@azure-tools/typespec-autorest" by default'; - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { let success = true; let stdOutput = ""; let errorOutput = ""; - const mainTspExists = await host.checkFileExists(join(folder, "main.tsp")); + const mainTspExists = await fileExists(join(folder, "main.tsp")); stdOutput += `mainTspExists: ${mainTspExists}\n`; if (mainTspExists) { - const configText = await host.readTspConfig(folder); + const configText = await readTspConfig(folder); const config = yamlParse(configText); const emit = config?.emit; diff --git a/eng/tools/typespec-validation/src/rules/flavor-azure.ts b/eng/tools/typespec-validation/src/rules/flavor-azure.ts index 1c01955448b3..ac6d1b76254a 100644 --- a/eng/tools/typespec-validation/src/rules/flavor-azure.ts +++ b/eng/tools/typespec-validation/src/rules/flavor-azure.ts @@ -1,24 +1,24 @@ import { parse as yamlParse } from "yaml"; -import { Rule } from "../rule.js"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { readTspConfig } from 
"../utils.js"; export class FlavorAzureRule implements Rule { readonly name = "FlavorAzure"; readonly description = "Client emitters must set 'flavor:azure'"; - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { let success = true; let stdOutput = ""; let errorOutput = ""; - const configText = await host.readTspConfig(folder); + const configText = await readTspConfig(folder); const config = yamlParse(configText); const options = config?.options; for (const emitter in options) { - if (this.isClientEmitter(emitter)) { + if (this.requiresAzureFlavor(emitter)) { const flavor = options[emitter]?.flavor; stdOutput += `"${emitter}":\n`; @@ -43,7 +43,12 @@ export class FlavorAzureRule implements Rule { }; } - isClientEmitter(name: string): boolean { + requiresAzureFlavor(name: string): boolean { + if (name === "@typespec/http-client-csharp") { + // C# emitters do not require flavor:azure. Instead, there + // is a separate emitter for Azure - @azure-typespec/http-client-csharp + return false; + } const regex = new RegExp( "^(@azure-tools/typespec-(csharp|java|python|ts)|@typespec/http-client-.+)$", ); diff --git a/eng/tools/typespec-validation/src/rules/folder-structure.ts b/eng/tools/typespec-validation/src/rules/folder-structure.ts index 9e010e12d664..5a777e6db198 100644 --- a/eng/tools/typespec-validation/src/rules/folder-structure.ts +++ b/eng/tools/typespec-validation/src/rules/folder-structure.ts @@ -1,21 +1,33 @@ +import debug from "debug"; +import { readFile } from "fs/promises"; +import { globby } from "globby"; import path from "path"; +import { simpleGit } from "simple-git"; import { parse as yamlParse } from "yaml"; -import { Rule } from "../rule.js"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { fileExists, normalizePath, readTspConfig } from "../utils.js"; + +// Enable simple-git debug logging to improve console output 
+debug.enable("simple-git"); export class FolderStructureRule implements Rule { readonly name = "FolderStructure"; readonly description = "Verify spec directory's folder structure and naming conventions."; - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { let success = true; let stdOutput = ""; let errorOutput = ""; - const gitRoot = host.normalizePath(await host.gitOperation(folder).revparse("--show-toplevel")); + const gitRoot = normalizePath(await simpleGit(folder).revparse("--show-toplevel")); const relativePath = path.relative(gitRoot, folder).split(path.sep).join("/"); + // If the folder containing TypeSpec sources is under "data-plane" or "resource-manager", the spec + // must be using "folder structure v2". Otherwise, it must be using v1. + const structureVersion = + relativePath.includes("data-plane") || relativePath.includes("resource-manager") ? 2 : 1; + stdOutput += `folder: ${folder}\n`; - if (!(await host.checkFileExists(folder))) { + if (!(await fileExists(folder))) { return { success: false, stdOutput: stdOutput, @@ -23,7 +35,7 @@ export class FolderStructureRule implements Rule { }; } - const tspConfigs = await host.globby([`${folder}/**tspconfig.*`]); + const tspConfigs = await globby([`${folder}/**tspconfig.*`]); stdOutput += `config files: ${JSON.stringify(tspConfigs)}\n`; tspConfigs.forEach((file: string) => { if (!file.endsWith("tspconfig.yaml")) { @@ -32,74 +44,159 @@ export class FolderStructureRule implements Rule { } }); - // Verify top level folder is lower case and remove empty entries when splitting by slash - const folderStruct = relativePath.split("/").filter(Boolean); - if (folderStruct[1].match(/[A-Z]/g)) { + // Verify tspconfig, main.tsp, examples/ + const mainExists = await fileExists(path.join(folder, "main.tsp")); + const clientExists = await fileExists(path.join(folder, "client.tsp")); + const tspConfigExists = await fileExists(path.join(folder, "tspconfig.yaml")); + + if 
(!mainExists && !clientExists) { + errorOutput += `Invalid folder structure: Spec folder must contain main.tsp or client.tsp.`; success = false; - errorOutput += `Invalid folder name. Folders under specification/ must be lower case.\n`; } - const packageFolder = folderStruct[folderStruct.length - 1]; - - // Verify package folder is at most 3 levels deep - if (folderStruct.length > 4) { + if (mainExists && !(await fileExists(path.join(folder, "examples")))) { + errorOutput += `Invalid folder structure: Spec folder with main.tsp must contain examples folder.`; success = false; - errorOutput += `Please limit TypeSpec folder depth to 3 levels or less`; } - // Verify second level folder is capitalized after each '.' - if (/(^|\. *)([a-z])/g.test(packageFolder)) { + const folderStruct = relativePath.split("/").filter(Boolean); + + // Verify top level folder is lower case and remove empty entries when splitting by slash + if (folderStruct[1].match(/[A-Z]/g)) { success = false; - errorOutput += `Invalid folder name. Folders under specification/${folderStruct[1]} must be capitalized after each '.'\n`; + errorOutput += `Invalid folder name. Folders under specification/ must be lower case.\n`; } - // Verify 'Shared' follows 'Management' - if (packageFolder.includes("Management") && packageFolder.includes("Shared")) { - if (!packageFolder.includes("Management.Shared")) { + if (structureVersion === 1) { + const packageFolder = folderStruct[folderStruct.length - 1]; + + if (!packageFolder.includes("Shared") && !tspConfigExists) { + errorOutput += `Invalid folder structure: Spec folder must contain tspconfig.yaml.`; success = false; - errorOutput += `Invalid folder name. 
For management libraries with a shared component, 'Shared' should follow 'Management'.`; } - } - // Verify tspconfig, main.tsp, examples/ - const mainExists = await host.checkFileExists(path.join(folder, "main.tsp")); - const clientExists = await host.checkFileExists(path.join(folder, "client.tsp")); - const tspConfigExists = await host.checkFileExists(path.join(folder, "tspconfig.yaml")); + // Verify package folder is at most 3 levels deep + if (folderStruct.length > 4) { + success = false; + errorOutput += `Please limit TypeSpec folder depth to 3 levels or less`; + } - if (!mainExists && !clientExists) { - errorOutput += `Invalid folder structure: Spec folder must contain main.tsp or client.tsp.`; - success = false; - } + // Verify second level folder is capitalized after each '.' + if (/(^|\. *)([a-z])/g.test(packageFolder)) { + success = false; + errorOutput += `Invalid folder name. Folders under specification/${folderStruct[1]} must be capitalized after each '.'\n`; + } - if (mainExists && !(await host.checkFileExists(path.join(folder, "examples")))) { - errorOutput += `Invalid folder structure: Spec folder with main.tsp must contain examples folder.`; - success = false; - } + // Verify 'Shared' follows 'Management' + if (packageFolder.includes("Management") && packageFolder.includes("Shared")) { + if (!packageFolder.includes("Management.Shared")) { + success = false; + errorOutput += `Invalid folder name. 
For management libraries with a shared component, 'Shared' should follow 'Management'.`; + } + } - if (!packageFolder.includes("Shared") && !tspConfigExists) { - errorOutput += `Invalid folder structure: Spec folder must contain tspconfig.yaml.`; - success = false; - } + if (tspConfigExists) { + const configText = await readTspConfig(folder); + const config = yamlParse(configText); + const rpFolder = + config?.options?.["@azure-tools/typespec-autorest"]?.["azure-resource-provider-folder"]; + stdOutput += `azure-resource-provider-folder: ${JSON.stringify(rpFolder)}\n`; - if (tspConfigExists) { - const configText = await host.readTspConfig(folder); - const config = yamlParse(configText); - const rpFolder = - config?.options?.["@azure-tools/typespec-autorest"]?.["azure-resource-provider-folder"]; - stdOutput += `azure-resource-provider-folder: ${JSON.stringify(rpFolder)}\n`; - - if ( - rpFolder?.trim()?.endsWith("resource-manager") && - !packageFolder.endsWith(".Management") - ) { - errorOutput += `Invalid folder structure: TypeSpec for resource-manager specs must be in a folder ending with '.Management'`; + if ( + rpFolder?.trim()?.endsWith("resource-manager") && + !packageFolder.endsWith(".Management") + ) { + errorOutput += `Invalid folder structure: TypeSpec for resource-manager specs must be in a folder ending with '.Management'`; + success = false; + } else if ( + !rpFolder?.trim()?.endsWith("resource-manager") && + packageFolder.endsWith(".Management") + ) { + errorOutput += `Invalid folder structure: TypeSpec for data-plane specs or shared code must be in a folder NOT ending with '.Management'`; + success = false; + } + } + } else if (structureVersion === 2) { + if (!tspConfigExists) { + errorOutput += `Invalid folder structure: Spec folder must contain tspconfig.yaml.`; success = false; - } else if ( - !rpFolder?.trim()?.endsWith("resource-manager") && - packageFolder.endsWith(".Management") - ) { - errorOutput += `Invalid folder structure: TypeSpec for 
data-plane specs or shared code must be in a folder NOT ending with '.Management'`; + } + + const specType = folder.includes("data-plane") ? "data-plane" : "resource-manager"; + if (specType === "data-plane") { + if (folderStruct.length !== 4) { + errorOutput += + "Invalid folder structure: TypeSpec for data-plane specs must be in a folder exactly one level under 'data-plane', like 'specification/foo/data-plane/FooAnalytics'."; + success = false; + } + } else if (specType === "resource-manager") { + if (folderStruct.length !== 5) { + errorOutput += + "Invalid folder structure: TypeSpec for resource-manager specs must be in a folder exactly two levels under 'resource-manager', like 'specification/foo/resource-management/Microsoft.Foo/Foo'."; + success = false; + } + + const rpNamespaceRegex = /^[A-Za-z0-9\.]+$/; + const rpNamespaceFolder = folderStruct[folderStruct.length - 2]; + + if (!rpNamespaceRegex.test(rpNamespaceFolder)) { + success = false; + errorOutput += `RPNamespace folder '${rpNamespaceFolder}' does not match regex ${rpNamespaceRegex}`; + } + } + + const serviceRegex = /^[A-Za-z0-9]+$/; + const serviceFolder = folderStruct[folderStruct.length - 1]; + + if (!serviceRegex.test(serviceFolder)) { success = false; + errorOutput += `Service folder '${serviceFolder}' does not match regex ${serviceRegex}`; + } + } + + // Ensure specs only import files from same folder under "specification" + stdOutput += "imports:\n"; + + const allowedImportRoot = + structureVersion === 1 ? 
path.join(...folderStruct.slice(0, 2)) : folder; + stdOutput += ` ${allowedImportRoot}\n`; + + const allowedImportRootResolved = path.resolve(gitRoot, allowedImportRoot); + + const tsps = await globby("**/*.tsp", { cwd: allowedImportRootResolved }); + + for (const tsp of tsps) { + const tspResolved = path.resolve(allowedImportRootResolved, tsp); + + const pattern = /^\s*import\s+['"]([^'"]+)['"]\s*;\s*$/gm; + const text = await readFile(tspResolved, { encoding: "utf8" }); + const imports = [...text.matchAll(pattern)].map((m) => m[1]); + + // The path specified in the import must either start with "./" or "../", or be an absolute path. + // The path should either point to a directory, or have an extension of either ".tsp" or ".js". + // https://typespec.io/docs/language-basics/imports/ + // + // We don't bother checking if the path has an extension of ".tsp" or ".js", because a directory + // is also valid, and a directory could be named anything. We only care if the path is under + // $teamFolder, so we just treat anything that looks like a relative or absolute path, + // as a path. 
+ const fileImports = imports.filter( + (i) => i.startsWith("./") || i.startsWith("../") || path.isAbsolute(i), + ); + + stdOutput += ` ${tsp}: ${JSON.stringify(fileImports)}\n`; + + for (const fileImport of fileImports) { + const fileImportResolved = path.resolve(path.dirname(tspResolved), fileImport); + + const relative = path.relative(allowedImportRootResolved, fileImportResolved); + + if (relative.startsWith("..")) { + errorOutput += + `Invalid folder structure: '${tsp}' imports '${fileImport}', ` + + `which is outside '${path.relative(gitRoot, allowedImportRoot)}'`; + success = false; + } } } diff --git a/eng/tools/typespec-validation/src/rules/format.ts b/eng/tools/typespec-validation/src/rules/format.ts index 1ce13f5c870c..ec9ce69f29f3 100644 --- a/eng/tools/typespec-validation/src/rules/format.ts +++ b/eng/tools/typespec-validation/src/rules/format.ts @@ -1,18 +1,19 @@ -import { Rule } from "../rule.js"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { gitDiffTopSpecFolder, runNpm } from "../utils.js"; export class FormatRule implements Rule { readonly name = "Format"; readonly description = "Format TypeSpec"; - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { let success = true; let stdOutput = ""; let errorOutput = ""; - let [err, stdout, stderr] = await host.runCmd( - 'npm exec --no -- tsp format "../**/*.tsp"', // Format parent folder to include shared files + let [err, stdout, stderr] = await runNpm( + // Format parent folder to include shared files + ["exec", "--no", "--", "tsp", "format", "../**/*.tsp"], folder, ); if (err) { @@ -22,8 +23,8 @@ export class FormatRule implements Rule { stdOutput += stdout; errorOutput += stderr; - [err, stdout, stderr] = await host.runCmd( - "npm exec --no -- prettier --write tspconfig.yaml", + [err, stdout, stderr] = await runNpm( + ["exec", "--no", "--", "prettier", "--write", 
"tspconfig.yaml"], folder, ); if (err) { @@ -34,7 +35,7 @@ export class FormatRule implements Rule { errorOutput += stderr; if (success) { - const gitDiffResult = await host.gitDiffTopSpecFolder(host, folder); + const gitDiffResult = await gitDiffTopSpecFolder(folder); stdOutput += gitDiffResult.stdOutput; if (!gitDiffResult.success) { success = false; diff --git a/eng/tools/typespec-validation/src/rules/linter-ruleset.ts b/eng/tools/typespec-validation/src/rules/linter-ruleset.ts index 5bc11f75bbc6..490f7f551716 100644 --- a/eng/tools/typespec-validation/src/rules/linter-ruleset.ts +++ b/eng/tools/typespec-validation/src/rules/linter-ruleset.ts @@ -1,8 +1,8 @@ import { join } from "path"; import { parse as yamlParse } from "yaml"; -import { Rule } from "../rule.js"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { fileExists, readTspConfig } from "../utils.js"; // Maps deprecated rulesets to the replacement rulesets const deprecatedRulesets = new Map([ @@ -19,20 +19,20 @@ export class LinterRulesetRule implements Rule { readonly description = "Ensures each spec includes the correct linter ruleset (data-plane or management-plane)"; - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { let success = true; let stdOutput = ""; let errorOutput = ""; - const configText = await host.readTspConfig(folder); + const configText = await readTspConfig(folder); const config = yamlParse(configText); const rpFolder = config?.options?.["@azure-tools/typespec-autorest"]?.["azure-resource-provider-folder"]; stdOutput += `azure-resource-provider-folder: ${JSON.stringify(rpFolder)}\n`; - const mainTspExists = await host.checkFileExists(join(folder, "main.tsp")); - const clientTspExists = await host.checkFileExists(join(folder, "client.tsp")); + const mainTspExists = await fileExists(join(folder, "main.tsp")); + const clientTspExists = await 
fileExists(join(folder, "client.tsp")); let files = []; if (mainTspExists) { files.push("main.tsp"); diff --git a/eng/tools/typespec-validation/src/rules/npm-prefix.ts b/eng/tools/typespec-validation/src/rules/npm-prefix.ts index 6f73792211cc..692722996888 100644 --- a/eng/tools/typespec-validation/src/rules/npm-prefix.ts +++ b/eng/tools/typespec-validation/src/rules/npm-prefix.ts @@ -1,18 +1,23 @@ -import { Rule } from "../rule.js"; +import debug from "debug"; +import { simpleGit } from "simple-git"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; +import { Rule } from "../rule.js"; +import { normalizePath, runNpm } from "../utils.js"; + +// Enable simple-git debug logging to improve console output +debug.enable("simple-git"); export class NpmPrefixRule implements Rule { readonly name = "NpmPrefix"; readonly description = "Verify spec is using root level package.json"; - async execute(host: TsvHost, folder: string): Promise { - const git = host.gitOperation(folder); + async execute(folder: string): Promise { + const git = simpleGit(folder); let expected_npm_prefix: string | undefined; try { // If spec folder is inside a git repo, returns repo root - expected_npm_prefix = host.normalizePath(await git.revparse("--show-toplevel")); + expected_npm_prefix = normalizePath(await git.revparse("--show-toplevel")); } catch (err) { // If spec folder is outside git repo, or if problem running git, throws error return { @@ -21,9 +26,7 @@ export class NpmPrefixRule implements Rule { }; } - const actual_npm_prefix = host.normalizePath( - (await host.runCmd(`npm prefix`, folder))[1].trim(), - ); + const actual_npm_prefix = normalizePath((await runNpm(["prefix"], folder))[1].trim()); let success = true; let stdOutput = diff --git a/eng/tools/typespec-validation/src/rules/sdk-tspconfig-validation.ts b/eng/tools/typespec-validation/src/rules/sdk-tspconfig-validation.ts index 43db9e9256aa..c3ff77d4bc89 100644 --- 
a/eng/tools/typespec-validation/src/rules/sdk-tspconfig-validation.ts +++ b/eng/tools/typespec-validation/src/rules/sdk-tspconfig-validation.ts @@ -2,8 +2,8 @@ import { join } from "path"; import { parse as yamlParse } from "yaml"; import { Rule } from "../rule.js"; import { RuleResult } from "../rule-result.js"; -import { TsvHost } from "../tsv-host.js"; import { Suppression } from "suppressions"; +import { fileExists, getSuppressions, readTspConfig } from "../utils.js"; type ExpectedValueType = string | boolean | RegExp; type SkipResult = { shouldSkip: boolean; reason?: string }; @@ -17,8 +17,8 @@ export abstract class TspconfigSubRuleBase { this.expectedValue = expectedValue; } - public async execute(host: TsvHost, folder: string): Promise { - const tspconfigExists = await host.checkFileExists(join(folder, "tspconfig.yaml")); + public async execute(folder: string): Promise { + const tspconfigExists = await fileExists(join(folder, "tspconfig.yaml")); if (!tspconfigExists) return this.createFailedResult( `Failed to find ${join(folder, "tspconfig.yaml")}`, @@ -27,7 +27,7 @@ export abstract class TspconfigSubRuleBase { let config = undefined; try { - const configText = await host.readTspConfig(folder); + const configText = await readTspConfig(folder); config = yamlParse(configText); } catch (error) { return this.createFailedResult( @@ -111,18 +111,22 @@ class TspconfigEmitterOptionsSubRuleBase extends TspconfigSubRuleBase { this.emitterName = emitterName; } - protected validate(config: any): RuleResult { + public getEmitterName() { + return this.emitterName; + } + + protected tryFindOption(config: any): Record | undefined { let option: Record | undefined = config?.options?.[this.emitterName]; for (const segment of this.keyToValidate.split(".")) { if (option && typeof option === "object" && !Array.isArray(option) && segment in option) option = option![segment]; - else - return this.createFailedResult( - `Failed to find 
"options.${this.emitterName}.${this.keyToValidate}"`, - `Please add "options.${this.emitterName}.${this.keyToValidate}"`, - ); + else return undefined; } + return option; + } + protected validate(config: any): RuleResult { + const option = this.tryFindOption(config); if (option === undefined) return this.createFailedResult( `Failed to find "options.${this.emitterName}.${this.keyToValidate}"`, @@ -145,7 +149,7 @@ class TspconfigEmitterOptionsSubRuleBase extends TspconfigSubRuleBase { } function isManagementSdk(folder: string): boolean { - return folder.includes(".Management"); + return folder.includes("/resource-manager/") || folder.includes(".Management"); } function skipForDataPlane(folder: string): SkipResult { @@ -162,9 +166,37 @@ function skipForManagementPlane(folder: string): SkipResult { }; } +function skipForRestLevelClientOrManagementPlaneInTsEmitter( + config: any, + folder: string, +): SkipResult { + const isRLCClient = + config?.options?.["@azure-tools/typespec-ts"]?.["is-modular-library"] !== true; + const shouldSkip = isManagementSdk(folder) || isRLCClient; + const result: SkipResult = { + shouldSkip: shouldSkip, + }; + if (result.shouldSkip) + result.reason = "This rule is only applicable for data plane SDKs with modular client."; + return result; +} + +function skipForModularOrManagementPlaneInTsEmitter(config: any, folder: string): SkipResult { + const isModularClient = + config?.options?.["@azure-tools/typespec-ts"]?.["is-modular-library"] === true; + const shouldSkip = isManagementSdk(folder) || isModularClient; + const result: SkipResult = { + shouldSkip: shouldSkip, + }; + if (result.shouldSkip) + result.reason = "This rule is only applicable for data plane SDKs with rest level client."; + return result; +} + function skipForNonModularOrDataPlaneInTsEmitter(config: any, folder: string): SkipResult { - // isModularLibrary is true by default - const isModularClient = config?.options?.["@azure-tools/typespec-ts"]?.isModularLibrary !== false; + // 
is-modular-library is true by default + const isModularClient = + config?.options?.["@azure-tools/typespec-ts"]?.["is-modular-library"] !== false; const shouldRun = isManagementSdk(folder) && isModularClient; const result: SkipResult = { shouldSkip: !shouldRun, @@ -188,46 +220,38 @@ export class TspConfigJavaAzPackageDirectorySubRule extends TspconfigEmitterOpti } } -// ----- TS management modular sub rules ----- -// NOTE: this is only used when TS emitter is migrating to the new option style -// will be deleted when the migration is done -class TspConfigTsOptionMigrationSubRuleBase extends TspconfigEmitterOptionsSubRuleBase { - private oldOptionStyleSubRule: TspconfigEmitterOptionsSubRuleBase & { - validateOption(config: any): RuleResult; - }; - private newOptionStyleSubRule: TspconfigEmitterOptionsSubRuleBase & { - validateOption(config: any): RuleResult; - }; - constructor(oldOptionName: string, newOptionName: string, expectedValue: ExpectedValueType) { - class PrivateOptionStyleSubRule extends TspconfigEmitterOptionsSubRuleBase { - constructor(optionName: string, expectedValue: ExpectedValueType) { - super("@azure-tools/typespec-ts", optionName, expectedValue); - } - public validateOption(config: any): RuleResult { - return this.validate(config); - } - } +export class TspConfigJavaMgmtPackageDirFormatSubRule extends TspconfigEmitterOptionsSubRuleBase { + constructor() { + super( + "@azure-tools/typespec-java", + "package-dir", + new RegExp(/^azure-resourcemanager-[^\/]+$/), // Matches "azure-resourcemanager-" with no restriction on characters after the hyphen + ); + } - // the parameters are not used, but are required to be passed to the super constructor - super("", "", ""); - this.oldOptionStyleSubRule = new PrivateOptionStyleSubRule(oldOptionName, expectedValue); - this.newOptionStyleSubRule = new PrivateOptionStyleSubRule(newOptionName, expectedValue); + protected skip(_: any, folder: string) { + return skipForDataPlane(folder); // Ensures this rule only 
applies to management plane SDKs } +} - protected validate(config: any): RuleResult { - var newResult = this.newOptionStyleSubRule.validateOption(config); - // if success == true, then the option is found and passes validation - // if success == false, and "Failed to find" is not in errorOutput, then the option is found but fails validation - if (newResult.success || !newResult.errorOutput?.includes("Failed to find")) return newResult; +export class TspConfigJavaMgmtNamespaceFormatSubRule extends TspconfigEmitterOptionsSubRuleBase { + constructor() { + super( + "@azure-tools/typespec-java", + "namespace", + new RegExp(/^com\.azure\.resourcemanager\.[^\.]+$/), // Matches "com.azure.resourcemanager." with no restriction on characters after the last dot + ); + } - var oldResult = this.oldOptionStyleSubRule.validateOption(config); - return oldResult; + protected skip(_: any, folder: string) { + return skipForDataPlane(folder); // Ensures this rule only applies to management plane SDKs } } -export class TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule extends TspConfigTsOptionMigrationSubRuleBase { +// ----- TS management modular sub rules ----- +export class TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { - super("experimentalExtensibleEnums", "experimental-extensible-enums", true); + super("@azure-tools/typespec-ts", "experimental-extensible-enums", true); } protected skip(config: any, folder: string) { return skipForNonModularOrDataPlaneInTsEmitter(config, folder); @@ -236,22 +260,64 @@ export class TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule extend export class TspConfigTsMgmtModularPackageDirectorySubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { - super("@azure-tools/typespec-ts", "package-dir", new RegExp(/^arm(?:-[a-z]+)+$/)); + super("@azure-tools/typespec-ts", "package-dir", new RegExp(/^arm-[^\/]+$/)); } + protected skip(config: any, 
folder: string) { return skipForNonModularOrDataPlaneInTsEmitter(config, folder); } } -export class TspConfigTsMgmtModularPackageNameMatchPatternSubRule extends TspConfigTsOptionMigrationSubRuleBase { +export class TspConfigTsMgmtModularPackageNameMatchPatternSubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { - super("packageDetails.name", "package-details.name", new RegExp(/^\@azure\/arm(?:-[a-z]+)+$/)); + super( + "@azure-tools/typespec-ts", + "package-details.name", + new RegExp(/^\@azure\/arm(?:-[a-z]+)+$/), + ); } protected skip(config: any, folder: string) { return skipForNonModularOrDataPlaneInTsEmitter(config, folder); } } +// ----- TS data plane sub rules ----- +export class TspConfigTsDpPackageDirectorySubRule extends TspconfigEmitterOptionsSubRuleBase { + constructor() { + super("@azure-tools/typespec-ts", "package-dir", new RegExp(/^(?:[a-z]+-)*rest$/)); + } + protected skip(config: any, folder: string) { + return skipForModularOrManagementPlaneInTsEmitter(config, folder); + } +} + +export class TspConfigTsRlcDpPackageNameMatchPatternSubRule extends TspconfigEmitterOptionsSubRuleBase { + constructor() { + super( + "@azure-tools/typespec-ts", + "package-details.name", + new RegExp(/^\@azure-rest\/[a-z]+(?:-[a-z]+)*$/), + ); + } + + protected skip(config: any, folder: string) { + return skipForModularOrManagementPlaneInTsEmitter(config, folder); + } +} + +export class TspConfigTsMlcDpPackageNameMatchPatternSubRule extends TspconfigEmitterOptionsSubRuleBase { + constructor() { + super( + "@azure-tools/typespec-ts", + "package-details.name", + new RegExp(/^@azure\/(?:[a-z]+-)*[a-z]+$/), + ); + } + + protected skip(config: any, folder: string) { + return skipForRestLevelClientOrManagementPlaneInTsEmitter(config, folder); + } +} // ----- Go data plane sub rules ----- export class TspConfigGoDpServiceDirMatchPatternSubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { @@ -289,6 +355,7 @@ export class 
TspConfigGoDpModuleMatchPatternSubRule extends TspconfigEmitterOpti } } +// ----- Go Mgmt plane sub rules ----- export class TspConfigGoMgmtServiceDirMatchPatternSubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { super("@azure-tools/typespec-go", "service-dir", new RegExp(/^sdk\/resourcemanager\/[^\/]*$/)); @@ -329,9 +396,9 @@ export class TspConfigGoMgmtFixConstStutteringTrueSubRule extends TspconfigEmitt } } -export class TspConfigGoMgmtGenerateExamplesTrueSubRule extends TspconfigEmitterOptionsSubRuleBase { +export class TspConfigGoMgmtGenerateSamplesTrueSubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { - super("@azure-tools/typespec-go", "generate-examples", true); + super("@azure-tools/typespec-go", "generate-samples", true); } protected skip(_: any, folder: string) { return skipForDataPlane(folder); @@ -370,6 +437,15 @@ export class TspConfigPythonMgmtPackageDirectorySubRule extends TspconfigEmitter } } +export class TspConfigPythonMgmtNamespaceSubRule extends TspconfigEmitterOptionsSubRuleBase { + constructor() { + super("@azure-tools/typespec-python", "namespace", new RegExp(/^azure\.mgmt(\.[a-z]+){1,2}$/)); + } + protected skip(_: any, folder: string) { + return skipForDataPlane(folder); + } +} + // ----- Python data plane sub rules ----- export class TspConfigPythonDpPackageDirectorySubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { @@ -381,12 +457,6 @@ export class TspConfigPythonDpPackageDirectorySubRule extends TspconfigEmitterOp } // ----- Python azure sub rules ----- -export class TspConfigPythonAzPackageNameEqualStringSubRule extends TspconfigEmitterOptionsSubRuleBase { - constructor() { - super("@azure-tools/typespec-python", "package-name", "{package-dir}"); - } -} - export class TspConfigPythonAzGenerateTestTrueSubRule extends TspconfigEmitterOptionsSubRuleBase { constructor() { super("@azure-tools/typespec-python", "generate-test", true); @@ -410,6 +480,29 @@ export class 
TspConfigCsharpAzNamespaceEqualStringSubRule extends TspconfigEmitt constructor() { super("@azure-tools/typespec-csharp", "namespace", "{package-dir}"); } + override validate(config: any): RuleResult { + const option = this.tryFindOption(config); + + if (option === undefined) + return this.createFailedResult( + `Failed to find "options.${this.emitterName}.${this.keyToValidate}"`, + `Please add "options.${this.emitterName}.${this.keyToValidate}"`, + ); + + const packageDir = config?.options?.[this.emitterName]?.["package-dir"]; + const actualValue = option as unknown as undefined | string | boolean; + if ( + this.validateValue(actualValue, this.expectedValue) || + (packageDir !== undefined && this.validateValue(actualValue, packageDir)) + ) { + return { success: true }; + } + + return this.createFailedResult( + `The value of options.${this.emitterName}.${this.keyToValidate} "${actualValue}" does not match "${this.expectedValue}" or the value of "package-dir" option or parameter`, + `Please update the value of "options.${this.emitterName}.${this.keyToValidate}" to match "${this.expectedValue}" or the value of "package-dir" option or parameter`, + ); + } } export class TspConfigCsharpAzClearOutputFolderTrueSubRule extends TspconfigEmitterOptionsSubRuleBase { @@ -430,14 +523,19 @@ export class TspConfigCsharpMgmtPackageDirectorySubRule extends TspconfigEmitter export const defaultRules = [ new TspConfigCommonAzServiceDirMatchPatternSubRule(), new TspConfigJavaAzPackageDirectorySubRule(), + new TspConfigJavaMgmtPackageDirFormatSubRule(), + new TspConfigJavaMgmtNamespaceFormatSubRule(), new TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule(), new TspConfigTsMgmtModularPackageDirectorySubRule(), new TspConfigTsMgmtModularPackageNameMatchPatternSubRule(), + new TspConfigTsDpPackageDirectorySubRule(), + new TspConfigTsRlcDpPackageNameMatchPatternSubRule(), + new TspConfigTsMlcDpPackageNameMatchPatternSubRule(), new TspConfigGoMgmtServiceDirMatchPatternSubRule(), 
new TspConfigGoMgmtPackageDirectorySubRule(), new TspConfigGoMgmtModuleEqualStringSubRule(), new TspConfigGoMgmtFixConstStutteringTrueSubRule(), - new TspConfigGoMgmtGenerateExamplesTrueSubRule(), + new TspConfigGoMgmtGenerateSamplesTrueSubRule(), new TspConfigGoAzGenerateFakesTrueSubRule(), new TspConfigGoMgmtHeadAsBooleanTrueSubRule(), new TspConfigGoAzInjectSpansTrueSubRule(), @@ -445,8 +543,8 @@ export const defaultRules = [ new TspConfigGoDpPackageDirectoryMatchPatternSubRule(), new TspConfigGoDpModuleMatchPatternSubRule(), new TspConfigPythonMgmtPackageDirectorySubRule(), + new TspConfigPythonMgmtNamespaceSubRule(), new TspConfigPythonDpPackageDirectorySubRule(), - new TspConfigPythonAzPackageNameEqualStringSubRule(), new TspConfigPythonAzGenerateTestTrueSubRule(), new TspConfigPythonAzGenerateSampleTrueSubRule(), new TspConfigCsharpAzPackageDirectorySubRule(), @@ -465,25 +563,51 @@ export class SdkTspConfigValidationRule implements Rule { this.subRules = subRules; } - async execute(host: TsvHost, folder: string): Promise { + async execute(folder: string): Promise { const tspConfigPath = join(folder, "tspconfig.yaml"); - const suppressions = await host.getSuppressions(tspConfigPath); + const suppressions = await getSuppressions(tspConfigPath); + + const shouldSuppressEntireRule = suppressions.some( + (s) => s.rules?.includes(this.name) === true && (!s.subRules || s.subRules.length === 0), + ); + if (shouldSuppressEntireRule) + return { success: true, stdOutput: `[${this.name}]: validation skipped.` }; + this.setSuppressedKeyPaths(suppressions); const failedResults = []; let success = true; for (const subRule of this.subRules) { - // TODO: support wildcard - if (this.suppressedKeyPaths.has(subRule.getPathOfKeyToValidate())) continue; - const result = await subRule.execute(host, folder!); + // Check for both direct matches and wildcard patterns + if (this.isKeyPathSuppressed(subRule.getPathOfKeyToValidate())) continue; + const result = await 
subRule.execute(folder!); if (!result.success) failedResults.push(result); - success &&= result.success; + + let isSubRuleSuccess = result.success; + + // TODO: remove when @azure-tools/typespec-csharp is ready for validating tspconfig + if (subRule instanceof TspconfigEmitterOptionsSubRuleBase) { + const emitterOptionSubRule = subRule as TspconfigEmitterOptionsSubRuleBase; + const emitterName = emitterOptionSubRule.getEmitterName(); + if (emitterName === "@azure-tools/typespec-csharp" && isSubRuleSuccess === false) { + console.warn( + `Validation on option "${emitterOptionSubRule.getPathOfKeyToValidate()}" in "${emitterName}" are failed. However, per ${emitterName}’s decision, we will treat it as passed.`, + ); + isSubRuleSuccess = true; + } + } + + success &&= isSubRuleSuccess; } - // NOTE: to avoid huge impact on existing PRs, we always return true with info/warning messages. + const stdOutputFailedResults = + failedResults.length > 0 + ? `${failedResults.map((r) => r.errorOutput).join("\n")}\nPlease see https://aka.ms/azsdk/spec-gen-sdk-config for more info.\nFor additional information on TypeSpec validation, please refer to https://aka.ms/azsdk/specs/typespec-validation.` + : ""; + return { - success: true, - stdOutput: `[${this.name}]: validation ${success ? "passed" : "failed"}.\n${failedResults.map((r) => r.errorOutput).join("\n")}`, + success, + stdOutput: `[${this.name}]: validation ${success ? "passed" : "failed"}.\n${stdOutputFailedResults}`, }; } @@ -491,7 +615,29 @@ export class SdkTspConfigValidationRule implements Rule { this.suppressedKeyPaths = new Set(); for (const suppression of suppressions) { if (!suppression.rules?.includes(this.name)) continue; - for (const ignoredKey of suppression.subRules ?? []) this.suppressedKeyPaths.add(ignoredKey); + for (const ignoredKey of suppression.subRules ?? 
[]) { + this.suppressedKeyPaths.add(ignoredKey); + console.warn(`Skip validation on ${ignoredKey}.`); + } + } + } + + private isKeyPathSuppressed(keyPath: string): boolean { + // Direct match + if (this.suppressedKeyPaths.has(keyPath)) { + return true; + } + + // Only check for wildcard at the end (format: prefix.*) + for (const suppressedPath of this.suppressedKeyPaths) { + if (suppressedPath.endsWith(".*")) { + const prefix = suppressedPath.slice(0, -2); // Remove the '.*' at the end + if (keyPath.startsWith(prefix)) { + return true; + } + } } + + return false; } } diff --git a/eng/tools/typespec-validation/src/tsv-host.ts b/eng/tools/typespec-validation/src/tsv-host.ts deleted file mode 100644 index 13aaff86df43..000000000000 --- a/eng/tools/typespec-validation/src/tsv-host.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { Suppression } from "suppressions"; -import { RuleResult } from "./rule-result.js"; - -export interface TsvHost { - checkFileExists(file: string): Promise; - isDirectory(path: string): Promise; - gitOperation(folder: string): IGitOperation; - readTspConfig(folder: string): Promise; - runCmd(cmd: string, cwd?: string): Promise<[Error | null, string, string]>; - normalizePath(folder: string): string; - gitDiffTopSpecFolder(host: TsvHost, folder: string): Promise; - globby(patterns: string[]): Promise; - getSuppressions(path: string): Promise; -} - -export interface IGitOperation { - status( - options?: string[], - ): Promise<{ isClean(): boolean; modified: string[]; not_added: string[] }>; - diff(): Promise; - revparse(option: string): Promise; -} diff --git a/eng/tools/typespec-validation/src/tsv-runner-host.ts b/eng/tools/typespec-validation/src/tsv-runner-host.ts deleted file mode 100644 index e1d39a8187c1..000000000000 --- a/eng/tools/typespec-validation/src/tsv-runner-host.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { readFile } from "fs/promises"; -import { globby } from "globby"; -import { join } from "path"; -import { simpleGit } from 
"simple-git"; -import { getSuppressions as getSuppressionsImpl, Suppression } from "suppressions"; -import { RuleResult } from "./rule-result.js"; -import { IGitOperation, TsvHost } from "./tsv-host.js"; -import { - checkFileExists, - gitDiffTopSpecFolder, - isDirectory, - normalizePath, - runCmd, -} from "./utils.js"; - -export class TsvRunnerHost implements TsvHost { - checkFileExists(file: string): Promise { - return checkFileExists(file); - } - - isDirectory(path: string) { - return isDirectory(path); - } - - gitOperation(folder: string): IGitOperation { - return simpleGit(folder); - } - - readTspConfig(folder: string): Promise { - return readFile(join(folder, "tspconfig.yaml"), "utf-8"); - } - - runCmd(cmd: string, cwd: string): Promise<[Error | null, string, string]> { - return runCmd(cmd, cwd); - } - - normalizePath(folder: string): string { - return normalizePath(folder); - } - - gitDiffTopSpecFolder(host: TsvHost, folder: string): Promise { - return gitDiffTopSpecFolder(host, folder); - } - - globby(patterns: string[]): Promise { - return globby(patterns); - } - - getSuppressions(path: string): Promise { - return getSuppressionsImpl("TypeSpecValidation", path); - } -} diff --git a/eng/tools/typespec-validation/src/utils.ts b/eng/tools/typespec-validation/src/utils.ts index 7d28ddb94b90..9dd8df6ea216 100644 --- a/eng/tools/typespec-validation/src/utils.ts +++ b/eng/tools/typespec-validation/src/utils.ts @@ -1,33 +1,55 @@ -import { access, stat } from "fs/promises"; -import { exec } from "child_process"; -import defaultPath, { PlatformPath } from "path"; -import { TsvHost } from "./tsv-host.js"; - -export async function runCmd(cmd: string, cwd?: string) { - console.log(`run command:${cmd}`); - const { err, stdout, stderr } = (await new Promise((res) => - exec( - cmd, - { encoding: "utf8", maxBuffer: 1024 * 1024 * 64, cwd: cwd }, - (err: unknown, stdout: unknown, stderr: unknown) => - res({ err: err, stdout: stdout, stderr: stderr }), - ), - )) as any; - - 
return [err, stdout, stderr] as [Error | null, string, string]; +import { execNpm, isExecError } from "@azure-tools/specs-shared/exec"; +import { ConsoleLogger } from "@azure-tools/specs-shared/logger"; +import debug from "debug"; +import { access, readFile } from "fs/promises"; +import defaultPath, { join, PlatformPath } from "path"; +import { simpleGit } from "simple-git"; +import { getSuppressions as getSuppressionsImpl, Suppression } from "suppressions"; +import { context } from "./index.js"; + +// Enable simple-git debug logging to improve console output +debug.enable("simple-git"); + +// Wraps execNpm() to return error (and coalesce stdout and stderr) instead of throwing +export async function runNpm( + args: string[], + cwd?: string, +): Promise<[Error | null, string, string]> { + try { + const { stdout, stderr } = await execNpm(args, { + logger: new ConsoleLogger(), + maxBuffer: 64 * 1024 * 1024, + cwd, + }); + return [null, stdout, stderr]; + } catch (error) { + if (isExecError(error)) { + return [error, error.stdout ?? "", error.stderr ?? 
""]; + } else { + throw error; + } + } } -export async function checkFileExists(file: string) { +export async function fileExists(file: string) { return access(file) .then(() => true) .catch(() => false); } -export async function isDirectory(path: string) { - return (await stat(path)).isDirectory(); +export async function readTspConfig(folder: string) { + return readFile(join(folder, "tspconfig.yaml"), "utf-8"); +} + +export async function getSuppressions(path: string): Promise { + return getSuppressionsImpl("TypeSpecValidation", path, context); } export function normalizePath(folder: string, path: PlatformPath = defaultPath) { + return normalizePathImpl(folder, path); +} + +export function normalizePathImpl(folder: string, path: PlatformPath = defaultPath) { return path .resolve(folder) .split(path.sep) @@ -35,8 +57,8 @@ export function normalizePath(folder: string, path: PlatformPath = defaultPath) .replace(/^([a-z]):/, (_match, driveLetter) => driveLetter.toUpperCase() + ":"); } -export async function gitDiffTopSpecFolder(host: TsvHost, folder: string) { - const git = host.gitOperation(folder); +export async function gitDiffTopSpecFolder(folder: string) { + const git = simpleGit(folder); let topSpecFolder = folder.replace(/(^.*specification\/[^\/]*)(.*)/, "$1"); let stdOutput = `Running git diff on folder ${topSpecFolder}`; let gitStatus = await git.status(["--porcelain", topSpecFolder]); diff --git a/eng/tools/typespec-validation/test/compile.test.ts b/eng/tools/typespec-validation/test/compile.test.ts index 6fdd5178996d..7b319afa0bf5 100644 --- a/eng/tools/typespec-validation/test/compile.test.ts +++ b/eng/tools/typespec-validation/test/compile.test.ts @@ -1,110 +1,272 @@ -import { describe, it } from "vitest"; -import { CompileRule } from "../src/rules/compile.js"; -import { TsvTestHost } from "./tsv-test-host.js"; -import { TsvHost } from "../src/tsv-host.js"; +import { mockAll, mockFolder } from "./mocks.js"; +mockAll(); + +import { afterEach, beforeEach, 
describe, expect, it, MockInstance, vi } from "vitest"; + +import * as fsPromises from "fs/promises"; +import * as globby from "globby"; +import path from "path"; import { RuleResult } from "../src/rule-result.js"; -import { strict as assert } from "node:assert"; +import { CompileRule } from "../src/rules/compile.js"; + +import * as utils from "../src/utils.js"; + +const swaggerPath = "data-plane/Azure.Foo/preview/2022-11-01-preview/foo.json"; +const handwrittenSwaggerPath = "data-plane/Azure.Foo/preview/2021-11-01-preview/foo.json"; + describe("compile", function () { + let gitDiffTopSpecFolderSpy: MockInstance; + let runNpmSpy: MockInstance; + + beforeEach(() => { + vi.spyOn(utils, "fileExists").mockResolvedValue(true); + vi.spyOn(utils, "getSuppressions").mockResolvedValue([]); + gitDiffTopSpecFolderSpy = vi + .spyOn(utils, "gitDiffTopSpecFolder") + .mockImplementation(async (folder) => { + return { + success: true, + stdOutput: `Running git diff on folder ${folder}}`, + errorOutput: "", + }; + }); + runNpmSpy = vi + .spyOn(utils, "runNpm") + .mockImplementation(async (args, cwd) => [null, `runNpm ${args.join(" ")} at ${cwd}`, ""]); + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + it("should succeed if project can compile", async function () { - const result = await new CompileRule().execute(new TsvTestHost(), TsvTestHost.folder); + const compileOutput = + // header, not a filename + "header\n" + + // windows line endings + "\r\n" + + // ensure paths are trimmed + `\t${swaggerPath} \n` + + // ensure paths are normalized + `${path.win32.normalize(swaggerPath)}\n` + + // ensure filtered to JSON files + "data-plane/readme.md\n" + + // ensure examples are skipped + `${swaggerPath.replace("foo.json", "examples/example.json")}\n`; + + runNpmSpy.mockImplementation( + async (_args: string[], _cwd?: string): Promise<[Error | null, string, string]> => { + return [null, compileOutput, ""]; + }, + ); - assert(result.success); + // ensure handwritten swaggers are 
ignored + vi.mocked(globby.globby).mockImplementation(async () => [swaggerPath, handwrittenSwaggerPath]); + vi.mocked(fsPromises.readFile).mockImplementation(async (path) => + path === swaggerPath ? '{"info": {"x-typespec-generated": true}}' : "{}", + ); + + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: true, + }); }); it("should fail if no emitter was configured", async function () { - let host = new TsvTestHost(); - host.runCmd = async (cmd: string, _cwd: string): Promise<[Error | null, string, string]> => { - if (cmd.includes("tsp compile")) { - return [null, "no emitter was configured", ""]; - } else { - return [null, "", ""]; - } - }; + runNpmSpy.mockImplementation( + async (args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + if (args.join(" ").includes("tsp compile")) { + return [null, "no emitter was configured", ""]; + } else { + return [null, "", ""]; + } + }, + ); - const result = await new CompileRule().execute(host, TsvTestHost.folder); - - assert(!result.success); + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: false, + }); }); it("should fail if no output was generated", async function () { - let host = new TsvTestHost(); - host.runCmd = async (cmd: string, _cwd: string): Promise<[Error | null, string, string]> => { - if (cmd.includes("tsp compile")) { - return [null, "no output was generated", ""]; - } else { - return [null, "", ""]; - } - }; + runNpmSpy.mockImplementation( + async (args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + if (args.join(" ").includes("tsp compile")) { + return [null, "no output was generated", ""]; + } else { + return [null, "", ""]; + } + }, + ); - const result = await new CompileRule().execute(host, TsvTestHost.folder); + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: false, + }); + }); + + it("should throw if output has no generated swaggers", 
async function () { + runNpmSpy.mockImplementation( + async (_args: string[], _cwd: string): Promise<[Error | null, string, string]> => [ + null, + "not-swagger", + "", + ], + ); - assert(!result.success); + await expect(new CompileRule().execute(mockFolder)).rejects.toThrowErrorMatchingInlineSnapshot( + `[Error: No generated swaggers found in output of 'tsp compile']`, + ); + }); + + it("should fail if extra swaggers", async function () { + runNpmSpy.mockImplementation( + async (_args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + return [null, swaggerPath, ""]; + }, + ); + + // Simulate extra swagger + vi.mocked(globby.globby).mockImplementation(async () => [ + swaggerPath, + swaggerPath.replace("2022", "2023"), + swaggerPath.replace("2023", "2024"), + ]); + + vi.mocked(fsPromises.readFile).mockImplementation(async (path) => { + return path.toString().includes("2024") + ? '{"info": {"x-typespec-generated": true}}' + : '{"info": {"x-cadl-generated": true}}'; + }); + + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: false, + errorOutput: expect.stringContaining("not generated from the current"), + }); + }); + + it("supports suppressions", async function () { + runNpmSpy.mockImplementation( + async (_args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + return [null, swaggerPath, ""]; + }, + ); + + // Simulate extra swagger + vi.mocked(globby.globby).mockImplementation(async () => [ + swaggerPath, + swaggerPath.replace("2022", "2023"), + swaggerPath.replace("2023", "2024"), + ]); + + vi.mocked(fsPromises.readFile).mockImplementation(async (path) => { + return path.toString().includes("2024") + ? '{"info": {"x-typespec-generated": true}}' + : '{"info": {"x-cadl-generated": true}}'; + }); + + vi.spyOn(utils, "getSuppressions").mockImplementation(async (path) => { + return path.includes("2023") || path.includes("2024") + ? 
[ + { + tool: "TypeSpecValidation", + rules: ["Compile"], + subRules: ["ExtraSwagger"], + paths: [swaggerPath.replace("2022", "2023"), swaggerPath.replace("2023", "2024")], + reason: "test reason", + }, + ] + : []; + }); + + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: true, + }); + }); + + it("throws on invalid suppressions", async function () { + runNpmSpy.mockImplementation( + async (_args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + return [null, swaggerPath, ""]; + }, + ); + + vi.spyOn(utils, "getSuppressions").mockImplementation(async () => [ + { + tool: "TypeSpecValidation", + rules: ["Compile"], + subRules: ["ExtraSwagger"], + paths: ["**/*"], + reason: "test reason", + }, + ]); + + await expect(new CompileRule().execute(mockFolder)).rejects.toThrow("Invalid path"); }); it("should skip git diff check if compile fails", async function () { - let host = new TsvTestHost(); - host.runCmd = async (cmd: string, _cwd: string): Promise<[Error | null, string, string]> => { - if (cmd.includes("tsp compile")) { - return [ - { name: "compilation_error", message: "compilation error" }, - "running tsp compile", - "compilation failure", - ]; - } - return [null, "", ""]; - }; - host.gitDiffTopSpecFolder = async (host: TsvHost, folder: string): Promise => { - let stdOut = `Running git diff on folder ${folder}, running default cmd ${host.runCmd( - "", - "", - )}`; - return { - success: true, - stdOutput: stdOut, - }; - }; + runNpmSpy.mockImplementation( + async (args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + if (args.join(" ").includes("tsp compile")) { + return [ + { name: "compilation_error", message: "compilation error" }, + "running tsp compile", + "compilation failure", + ]; + } + return [null, "", ""]; + }, + ); - const result = await new CompileRule().execute(host, TsvTestHost.folder); - assert(result.stdOutput); - assert(!result.stdOutput.includes("Running git 
diff")); + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: false, + stdOutput: expect.not.stringContaining("Running git diff"), + }); }); it("should fail if git diff fails", async function () { - let host = new TsvTestHost(); - host.gitDiffTopSpecFolder = async (host: TsvHost, folder: string): Promise => { - let stdOut = `Running git diff on folder ${folder}, running default cmd ${host.runCmd( - "", - "", - )}`; + runNpmSpy.mockImplementation( + async (_args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + return [null, swaggerPath, ""]; + }, + ); + + vi.mocked(globby.globby).mockImplementation(async () => [swaggerPath]); + + gitDiffTopSpecFolderSpy.mockImplementation(async (folder: string): Promise => { + let stdOut = `Running git diff on folder ${folder}`; + return { success: false, stdOutput: stdOut, errorOutput: `Files generated: ${folder}/bar`, }; - }; + }); - const result = await new CompileRule().execute(host, TsvTestHost.folder); - assert(result.stdOutput); - assert(result.stdOutput.includes("Running git diff")); - assert(!result.success); + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: false, + stdOutput: expect.stringContaining("Running git diff"), + }); }); it("should succeed if git diff succeeds", async function () { - let host = new TsvTestHost(); - host.gitDiffTopSpecFolder = async (host: TsvHost, folder: string): Promise => { - let stdOut = `Running git diff on folder ${folder}, running default cmd ${host.runCmd( - "", - "", - )}`; + runNpmSpy.mockImplementation( + async (_args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + return [null, swaggerPath, ""]; + }, + ); + + vi.mocked(globby.globby).mockImplementation(async () => [swaggerPath]); + + gitDiffTopSpecFolderSpy.mockImplementation(async (folder: string): Promise => { + let stdOut = `Running git diff on folder ${folder}`; return { success: true, stdOutput: stdOut, }; 
- }; + }); - const result = await new CompileRule().execute(host, TsvTestHost.folder); - assert(result.stdOutput); - assert(result.stdOutput.includes("Running git diff")); - assert(result.success); + await expect(new CompileRule().execute(mockFolder)).resolves.toMatchObject({ + success: true, + stdOutput: expect.stringContaining("Running git diff"), + }); }); }); diff --git a/eng/tools/typespec-validation/test/emit-autorest.test.ts b/eng/tools/typespec-validation/test/emit-autorest.test.ts index 18b7fcf9d897..63da44117d29 100644 --- a/eng/tools/typespec-validation/test/emit-autorest.test.ts +++ b/eng/tools/typespec-validation/test/emit-autorest.test.ts @@ -1,61 +1,78 @@ -import { describe, it } from "vitest"; +import { contosoTspConfig } from "@azure-tools/specs-shared/test/examples"; +import { strict as assert } from "node:assert"; import { join } from "path"; +import { afterEach, beforeEach, describe, it, MockInstance, vi } from "vitest"; import { EmitAutorestRule } from "../src/rules/emit-autorest.js"; -import { TsvTestHost } from "./tsv-test-host.js"; -import { strict as assert } from "node:assert"; + +import * as utils from "../src/utils.js"; +import { mockFolder } from "./mocks.js"; describe("emit-autorest", function () { + let fileExistsSpy: MockInstance; + let readTspConfigSpy: MockInstance; + + beforeEach(() => { + fileExistsSpy = vi.spyOn(utils, "fileExists").mockResolvedValue(true); + readTspConfigSpy = vi.spyOn(utils, "readTspConfig").mockResolvedValue(contosoTspConfig); + }); + + afterEach(() => { + fileExistsSpy.mockReset(); + readTspConfigSpy.mockReset(); + }); + it("should succeed if no main.tsp", async function () { - let host = new TsvTestHost(); - host.checkFileExists = async (file: string) => file != join(TsvTestHost.folder, "main.tsp"); + fileExistsSpy.mockImplementation(async (file: string) => file != join(mockFolder, "main.tsp")); - const result = await new EmitAutorestRule().execute(host, TsvTestHost.folder); + const result = await new 
EmitAutorestRule().execute(mockFolder); assert(result.success); }); it("should succeed if emits autorest", async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_mockFolder: string) => ` emit: - "@azure-tools/typespec-autorest" -`; +`, + ); - const result = await new EmitAutorestRule().execute(host, TsvTestHost.folder); + const result = await new EmitAutorestRule().execute(mockFolder); assert(result.success); }); it("should fail if config is empty", async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ""; + readTspConfigSpy.mockImplementation(async (_mockFolder: string) => ""); - const result = await new EmitAutorestRule().execute(host, TsvTestHost.folder); + const result = await new EmitAutorestRule().execute(mockFolder); assert(!result.success); }); it("should fail if no emit", async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_mockFolder: string) => ` linter: extends: - "@azure-tools/typespec-azure-rulesets/data-plane" -`; +`, + ); - const result = await new EmitAutorestRule().execute(host, TsvTestHost.folder); + const result = await new EmitAutorestRule().execute(mockFolder); assert(!result.success); }); it("should fail if no emit autorest", async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_mockFolder: string) => ` emit: - foo -`; +`, + ); - const result = await new EmitAutorestRule().execute(host, TsvTestHost.folder); + const result = await new EmitAutorestRule().execute(mockFolder); assert(!result.success); }); diff --git a/eng/tools/typespec-validation/test/flavor-azure.test.ts b/eng/tools/typespec-validation/test/flavor-azure.test.ts index e282d9364989..4a85c5ecb44e 100644 --- 
a/eng/tools/typespec-validation/test/flavor-azure.test.ts +++ b/eng/tools/typespec-validation/test/flavor-azure.test.ts @@ -1,9 +1,22 @@ -import { describe, it } from "vitest"; -import { FlavorAzureRule } from "../src/rules/flavor-azure.js"; -import { TsvTestHost } from "./tsv-test-host.js"; +import { contosoTspConfig } from "@azure-tools/specs-shared/test/examples"; import { strict as assert } from "node:assert"; +import { afterEach, beforeEach, describe, it, MockInstance, vi } from "vitest"; +import { FlavorAzureRule } from "../src/rules/flavor-azure.js"; + +import * as utils from "../src/utils.js"; +import { mockFolder } from "./mocks.js"; describe("flavor-azure", function () { + let readTspConfigSpy: MockInstance; + + beforeEach(() => { + readTspConfigSpy = vi.spyOn(utils, "readTspConfig").mockResolvedValue(contosoTspConfig); + }); + + afterEach(() => { + readTspConfigSpy.mockReset(); + }); + const clientEmitterNames = [ "@azure-tools/typespec-csharp", "@azure-tools/typespec-java", @@ -16,69 +29,72 @@ describe("flavor-azure", function () { clientEmitterNames.forEach(function (emitter) { it(`should fail if "${emitter}" is missing flavor`, async function () { - let host = new TsvTestHost(); - - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "${emitter}": package-dir: "foo" - `; - const result = await new FlavorAzureRule().execute(host, TsvTestHost.folder); + `, + ); + const result = await new FlavorAzureRule().execute(mockFolder); assert(!result.success); }); it(`should fail if "${emitter}" flavor is not "azure"`, async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "${emitter}": package-dir: "foo" flavor: not-azure - `; - const result = await new FlavorAzureRule().execute(host, TsvTestHost.folder); + `, + ); + const result = await new 
FlavorAzureRule().execute(mockFolder); assert(!result.success); }); it(`should succeed if ${emitter} flavor is "azure"`, async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "${emitter}": package-dir: "foo" flavor: azure - `; - const result = await new FlavorAzureRule().execute(host, TsvTestHost.folder); + `, + ); + const result = await new FlavorAzureRule().execute(mockFolder); assert(result.success); }); }); nonClientEmitterNames.forEach(function (emitter) { it(`should succeed if ${emitter} is missing flavor`, async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "${emitter}": azure-resource-provider-folder: "data-plane" - `; - const result = await new FlavorAzureRule().execute(host, TsvTestHost.folder); + `, + ); + const result = await new FlavorAzureRule().execute(mockFolder); assert(result.success); }); }); it("should succeed if config is empty", async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ""; - const result = await new FlavorAzureRule().execute(host, TsvTestHost.folder); + readTspConfigSpy.mockImplementation(async (_folder: string) => ""); + const result = await new FlavorAzureRule().execute(mockFolder); assert(result.success); }); it("should succeed if config has no options", async function () { - let host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` emit: - "@azure-tools/typespec-autorest" -`; - const result = await new FlavorAzureRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new FlavorAzureRule().execute(mockFolder); assert(result.success); }); }); diff --git a/eng/tools/typespec-validation/test/folder-structure.test.ts 
b/eng/tools/typespec-validation/test/folder-structure.test.ts index 300788ad5f75..bcda6f765fc9 100644 --- a/eng/tools/typespec-validation/test/folder-structure.test.ts +++ b/eng/tools/typespec-validation/test/folder-structure.test.ts @@ -1,61 +1,75 @@ -import { describe, it } from "vitest"; -import { FolderStructureRule } from "../src/rules/folder-structure.js"; -import { TsvTestHost } from "./tsv-test-host.js"; +import { mockAll, mockFolder } from "./mocks.js"; +mockAll(); + +import { contosoTspConfig } from "@azure-tools/specs-shared/test/examples"; +import * as globby from "globby"; import { strict as assert } from "node:assert"; +import { afterEach, beforeEach, describe, it, MockInstance, vi } from "vitest"; +import { FolderStructureRule } from "../src/rules/folder-structure.js"; + +import * as utils from "../src/utils.js"; describe("folder-structure", function () { + let fileExistsSpy: MockInstance; + let normalizePathSpy: MockInstance; + let readTspConfigSpy: MockInstance; + + beforeEach(() => { + fileExistsSpy = vi.spyOn(utils, "fileExists").mockResolvedValue(true); + normalizePathSpy = vi.spyOn(utils, "normalizePath"); + readTspConfigSpy = vi.spyOn(utils, "readTspConfig").mockResolvedValue(contosoTspConfig); + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + it("should fail if folder doesn't exist", async function () { + fileExistsSpy.mockResolvedValue(false); + + const result = await new FolderStructureRule().execute(mockFolder); + assert(result.errorOutput); + assert(result.errorOutput.includes("does not exist")); + }); + it("should fail if tspconfig has incorrect extension", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yml"]; - }; + }); - const result = await new FolderStructureRule().execute(host, TsvTestHost.folder); + const result = await new FolderStructureRule().execute(mockFolder); assert(result.errorOutput); 
assert(result.errorOutput.includes("Invalid config file")); }); it("should fail if folder under specification/ is capitalized", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); - const result = await new FolderStructureRule().execute(host, "/gitroot/specification/Foo/Foo"); + const result = await new FolderStructureRule().execute("/gitroot/specification/Foo/Foo"); assert(result.errorOutput); assert(result.errorOutput.includes("must be lower case")); }); it("should succeed if package folder has trailing slash", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); - const result = await new FolderStructureRule().execute( - host, - "/gitroot/specification/foo/Foo/Foo/", - ); + const result = await new FolderStructureRule().execute("/gitroot/specification/foo/Foo/Foo/"); assert(result.success); }); it("should fail if package folder is more than 3 levels deep", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/Foo/Foo/Foo", ); assert(result.errorOutput); @@ -63,67 +77,47 @@ describe("folder-structure", function () { }); it("should fail if second level folder not capitalized at after each '.' 
", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); - const result = await new FolderStructureRule().execute( - host, - "/gitroot/specification/foo/Foo.foo", - ); + const result = await new FolderStructureRule().execute("/gitroot/specification/foo/Foo.foo"); assert(result.errorOutput); assert(result.errorOutput.includes("must be capitalized")); }); it("should fail if second level folder is data-plane", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); - const result = await new FolderStructureRule().execute( - host, - "/gitroot/specification/foo/data-plane", - ); + const result = await new FolderStructureRule().execute("/gitroot/specification/foo/data-plane"); assert(result.errorOutput); - assert(result.errorOutput.includes("must be capitalized")); + assert(result.errorOutput.includes("does not match regex")); }); it("should fail if second level folder is resource-manager", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/resource-manager", ); assert(result.errorOutput); - assert(result.errorOutput.includes("must be capitalized")); + assert(result.errorOutput.includes("does not match regex")); }); it("should fail if Shared does not follow Management ", 
async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; + }); + normalizePathSpy.mockReturnValue("/gitroot"); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/Foo.Management.Foo.Shared", ); assert(result.errorOutput); @@ -131,24 +125,21 @@ describe("folder-structure", function () { }); it("should fail if folder doesn't contain main.tsp nor client.tsp", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.checkFileExists = async (file: string) => { + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + fileExistsSpy.mockImplementation(async (file: string) => { if (file.includes("main.tsp")) { return false; } else if (file.includes("client.tsp")) { return false; } return true; - }; + }); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/Foo.Management", ); @@ -157,24 +148,21 @@ describe("folder-structure", function () { }); it("should fail if folder doesn't contain examples when main.tsp exists", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.checkFileExists = async (file: string) => { + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + fileExistsSpy.mockImplementation(async (file: string) => { if (file.includes("main.tsp")) { return true; } else if (file.includes("examples")) { return false; } return true; - }; + }); const result = await new FolderStructureRule().execute( - host, 
"/gitroot/specification/foo/Foo.Management", ); @@ -183,22 +171,19 @@ describe("folder-structure", function () { }); it("should fail if non-shared folder doesn't contain tspconfig", async function () { - let host = new TsvTestHost(); - host.globby = async () => { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/bar/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.checkFileExists = async (file: string) => { + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + fileExistsSpy.mockImplementation(async (file: string) => { if (file.includes("tspconfig.yaml")) { return false; } return true; - }; + }); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/Foo.Management", ); @@ -206,93 +191,155 @@ describe("folder-structure", function () { assert(result.errorOutput.includes("must contain")); }); - it("should succeed with resource-manager/Management", async function() { - let host = new TsvTestHost(); - host.globby = async () => { + it("should succeed with resource-manager/Management", async function () { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/Foo.Management/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.readTspConfig = async (_folder: string) => ` + }); + normalizePathSpy.mockReturnValue("/gitroot"); + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "resource-manager" -`; +`, + ); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/Foo.Management", ); assert(result.success); }); - it("should succeed with data-plane/NoManagement", async function() { - let host = new TsvTestHost(); - host.globby = async () => { + it("should succeed with data-plane/NoManagement", async function () { + vi.mocked(globby.globby).mockImplementation(async () => { return 
["/foo/Foo/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.readTspConfig = async (_folder: string) => ` + }); + normalizePathSpy.mockReturnValue("/gitroot"); + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "data-plane" -`; - - const result = await new FolderStructureRule().execute( - host, - "/gitroot/specification/foo/Foo", +`, ); + const result = await new FolderStructureRule().execute("/gitroot/specification/foo/Foo"); + assert(result.success); }); - it("should fail with resource-manager/NoManagement", async function() { - let host = new TsvTestHost(); - host.globby = async () => { + it("should fail with resource-manager/NoManagement", async function () { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/Foo/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.readTspConfig = async (_folder: string) => ` + }); + normalizePathSpy.mockReturnValue("/gitroot"); + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "resource-manager" -`; - - const result = await new FolderStructureRule().execute( - host, - "/gitroot/specification/foo/Foo", +`, ); + const result = await new FolderStructureRule().execute("/gitroot/specification/foo/Foo"); + assert(result.errorOutput); assert(result.errorOutput.includes(".Management")); }); - it("should fail with data-plane/Management", async function() { - let host = new TsvTestHost(); - host.globby = async () => { + it("should fail with data-plane/Management", async function () { + vi.mocked(globby.globby).mockImplementation(async () => { return ["/foo/Foo.Management/tspconfig.yaml"]; - }; - host.normalizePath = () => { - return "/gitroot"; - }; - host.readTspConfig = async (_folder: string) => ` + }); + normalizePathSpy.mockReturnValue("/gitroot"); + 
readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "data-plane" -`; +`, + ); const result = await new FolderStructureRule().execute( - host, "/gitroot/specification/foo/Foo.Management", ); assert(result.errorOutput); assert(result.errorOutput.includes(".Management")); }); + + it("v2: should fail if no tspconfig.yaml", async function () { + vi.mocked(globby.globby).mockImplementation(async () => { + return ["main.tsp"]; + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + fileExistsSpy.mockImplementation(async (file: string) => { + if (file.includes("tspconfig.yaml")) { + return false; + } + return true; + }); + + const result = await new FolderStructureRule().execute( + "/gitroot/specification/foo/data-plane/Foo", + ); + + assert(result.errorOutput?.includes("must contain")); + }); + + it("v2: should fail if incorrect folder depth", async function () { + vi.mocked(globby.globby).mockImplementation(async () => { + return ["tspconfig.yaml"]; + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + let result = await new FolderStructureRule().execute("/gitroot/specification/foo/data-plane"); + assert(result.errorOutput?.includes("level under")); + + result = await new FolderStructureRule().execute( + "/gitroot/specification/foo/data-plane/Foo/too-deep", + ); + assert(result.errorOutput?.includes("level under")); + + result = await new FolderStructureRule().execute("/gitroot/specification/foo/resource-manager"); + assert(result.errorOutput?.includes("levels under")); + + result = await new FolderStructureRule().execute( + "/gitroot/specification/foo/resource-manager/RP.Namespace", + ); + assert(result.errorOutput?.includes("levels under")); + + result = await new FolderStructureRule().execute( + "/gitroot/specification/foo/resource-manager/RP.Namespace/FooManagement/too-deep", + ); + assert(result.errorOutput?.includes("levels under")); + }); + + it("v2: should succeed 
with data-plane", async function () { + vi.mocked(globby.globby).mockImplementation(async (patterns) => { + return patterns[0].includes("tspconfig") ? ["tspconfig.yaml"] : ["main.tsp"]; + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + const result = await new FolderStructureRule().execute( + "/gitroot/specification/foo/data-plane/Foo", + ); + + assert(result.success); + }); + + it("v2: should succeed with resource-manager", async function () { + vi.mocked(globby.globby).mockImplementation(async (patterns) => { + return patterns[0].includes("tspconfig") ? ["tspconfig.yaml"] : ["main.tsp"]; + }); + normalizePathSpy.mockReturnValue("/gitroot"); + + const result = await new FolderStructureRule().execute( + "/gitroot/specification/foo/resource-manager/Microsoft.Foo/FooManagement", + ); + + assert(result.success); + }); }); diff --git a/eng/tools/typespec-validation/test/linter-ruleset.test.ts b/eng/tools/typespec-validation/test/linter-ruleset.test.ts index 83b4ffb2928a..49a1098b6b89 100644 --- a/eng/tools/typespec-validation/test/linter-ruleset.test.ts +++ b/eng/tools/typespec-validation/test/linter-ruleset.test.ts @@ -1,105 +1,129 @@ -import { describe, it } from "vitest"; +import { contosoTspConfig } from "@azure-tools/specs-shared/test/examples"; +import { strict as assert } from "node:assert"; import { join } from "path"; +import { afterEach, beforeEach, describe, it, MockInstance, vi } from "vitest"; import { LinterRulesetRule } from "../src/rules/linter-ruleset.js"; -import { TsvTestHost } from "./tsv-test-host.js"; -import { strict as assert } from "node:assert"; + +import * as utils from "../src/utils.js"; +import { mockFolder } from "./mocks.js"; describe("linter-ruleset", function () { + let fileExistsSpy: MockInstance; + let readTspConfigSpy: MockInstance; + + beforeEach(() => { + fileExistsSpy = vi.spyOn(utils, "fileExists").mockResolvedValue(true); + readTspConfigSpy = vi.spyOn(utils, "readTspConfig").mockResolvedValue(contosoTspConfig); + }); + + 
afterEach(() => { + fileExistsSpy.mockReset(); + readTspConfigSpy.mockReset(); + }); + it("succeeds with default config", async function () { - const host = new TsvTestHost(); - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); + const result = await new LinterRulesetRule().execute(mockFolder); assert(result.success); }); it("succeeds with resource-manager/resource-manager", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "resource-manager" linter: extends: - "@azure-tools/typespec-azure-rulesets/resource-manager" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(result.success); }); it("succeeds with data-plane/data-plane", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "data-plane" linter: extends: - "@azure-tools/typespec-azure-rulesets/data-plane" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(result.success); }); it("succeeds with client.tsp/data-plane", async function () { - const host = new TsvTestHost(); - host.checkFileExists = async (file: string) => file === join(TsvTestHost.folder, "client.tsp"); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` linter: extends: - "@azure-tools/typespec-azure-rulesets/data-plane" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + + 
fileExistsSpy.mockImplementation( + async (file: string) => file === join(mockFolder, "client.tsp"), + ); + + const result = await new LinterRulesetRule().execute(mockFolder); assert(result.success); }); it("fails with no-config", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ""; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); + readTspConfigSpy.mockImplementation(async (_folder: string) => ""); + const result = await new LinterRulesetRule().execute(mockFolder); assert(!result.success); }); it("fails with resource-manager/no-linter", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "resource-manager" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(!result.success); }); it("fails with resource-manager/data-plane", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "resource-manager" linter: extends: - "@azure-tools/typespec-azure-rulesets/data-plane" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(!result.success); }); it("fails with data-plane/resource-manager", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "data-plane" linter: extends: - 
"@azure-tools/typespec-azure-rulesets/resource-manager" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(!result.success); }); it("fails with data-plane/old-and-new", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "data-plane" @@ -107,14 +131,15 @@ linter: extends: - "@azure-tools/typespec-azure-core/all" - "@azure-tools/typespec-azure-rulesets/data-plane" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(!result.success); }); it("fails with resource-manager/old-and-new", async function () { - const host = new TsvTestHost(); - host.readTspConfig = async (_folder: string) => ` + readTspConfigSpy.mockImplementation( + async (_folder: string) => ` options: "@azure-tools/typespec-autorest": azure-resource-provider-folder: "resource-manager" @@ -122,8 +147,9 @@ linter: extends: - "@azure-tools/typespec-azure-resource-manager/all" - "@azure-tools/typespec-azure-rulesets/resource-manager" -`; - const result = await new LinterRulesetRule().execute(host, TsvTestHost.folder); +`, + ); + const result = await new LinterRulesetRule().execute(mockFolder); assert(!result.success); }); diff --git a/eng/tools/typespec-validation/test/mocks.ts b/eng/tools/typespec-validation/test/mocks.ts new file mode 100644 index 000000000000..3f7a06900c4e --- /dev/null +++ b/eng/tools/typespec-validation/test/mocks.ts @@ -0,0 +1,34 @@ +import { vi } from "vitest"; + +export const mockFolder = "specification/foo/Foo"; + +export function mockFsPromises() { + vi.mock("fs/promises", () => ({ + readFile: vi.fn().mockResolvedValue('{"info": {"x-typespec-generated": 
true}}'), + })); +} + +export function mockGlobby() { + vi.mock("globby", () => ({ + globby: vi.fn().mockResolvedValue([]), + })); +} + +export function mockSimpleGit() { + vi.mock("simple-git", () => ({ + simpleGit: vi.fn().mockReturnValue({ + revparse: vi.fn().mockResolvedValue(""), + status: vi.fn().mockResolvedValue({ + modified: [], + not_added: [], + isClean: () => true, + }), + }), + })); +} + +export function mockAll() { + mockFsPromises(); + mockGlobby(); + mockSimpleGit(); +} diff --git a/eng/tools/typespec-validation/test/npm-prefix.test.ts b/eng/tools/typespec-validation/test/npm-prefix.test.ts index 281cccaf0a42..ebccfc318d2a 100644 --- a/eng/tools/typespec-validation/test/npm-prefix.test.ts +++ b/eng/tools/typespec-validation/test/npm-prefix.test.ts @@ -1,70 +1,63 @@ -import { describe, it } from "vitest"; -import { NpmPrefixRule } from "../src/rules/npm-prefix.js"; -import { IGitOperation, TsvTestHost } from "./tsv-test-host.js"; +import { mockFolder, mockSimpleGit } from "./mocks.js"; +mockSimpleGit(); + +import * as simpleGit from "simple-git"; + import { strict as assert } from "node:assert"; import path from "path"; +import { afterEach, beforeEach, describe, it, MockInstance, vi } from "vitest"; +import { NpmPrefixRule } from "../src/rules/npm-prefix.js"; + +import * as utils from "../src/utils.js"; describe("npm-prefix", function () { + let runNpmSpy: MockInstance; + + beforeEach(() => { + runNpmSpy = vi + .spyOn(utils, "runNpm") + .mockImplementation(async (args, cwd) => [null, `runNpm ${args.join(" ")} at ${cwd}`, ""]); + }); + + afterEach(() => { + runNpmSpy.mockReset(); + }); + it("should succeed if node returns inconsistent drive letter capitalization", async function () { - let host = new TsvTestHost(path.win32); - host.runCmd = async (cmd: string, _cwd: string): Promise<[Error | null, string, string]> => { - if (cmd.includes("npm prefix")) { - return [null, `C:${path.sep}Git${path.sep}azure-rest-api-specs`, ""]; - } else { - return [null, 
"", ""]; - } - }; - host.gitOperation = (_folder: string): IGitOperation => { - return { - status: () => { - return Promise.resolve({ - modified: [], - not_added: [], - isClean: () => true, - }); - }, - diff: () => { - return Promise.resolve(""); - }, - revparse: () => { - return Promise.resolve("c:/Git/azure-rest-api-specs"); - }, - }; - }; + runNpmSpy.mockImplementation( + async (args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + if (args.includes("prefix")) { + return [null, `C:${path.sep}Git${path.sep}azure-rest-api-specs`, ""]; + } else { + return [null, "", ""]; + } + }, + ); + + vi.mocked(simpleGit.simpleGit().revparse).mockResolvedValue("c:/Git/azure-rest-api-specs"); + + vi.spyOn(utils, "normalizePath").mockImplementation((folder) => + utils.normalizePathImpl(folder, path.win32), + ); - const result = await new NpmPrefixRule().execute(host, TsvTestHost.folder); + const result = await new NpmPrefixRule().execute(mockFolder); assert(result.success); }); it("should fail if npm prefix mismatch", async function () { - let host = new TsvTestHost(); - host.runCmd = async (cmd: string, _cwd: string): Promise<[Error | null, string, string]> => { - if (cmd.includes("npm prefix")) { - return [null, "/Git/azure-rest-api-specs/specification/foo", ""]; - } else { - return [null, "", ""]; - } - }; - host.gitOperation = (_folder: string): IGitOperation => { - return { - status: () => { - return Promise.resolve({ - modified: [], - not_added: [], - isClean: () => true, - }); - }, - diff: () => { - return Promise.resolve(""); - }, - revparse: () => { - return Promise.resolve("/Git/azure-rest-api-specs"); - }, - }; - }; + runNpmSpy.mockImplementation( + async (args: string[], _cwd: string): Promise<[Error | null, string, string]> => { + if (args.includes("prefix")) { + return [null, "/Git/azure-rest-api-specs/specification/foo", ""]; + } else { + return [null, "", ""]; + } + }, + ); + 
vi.mocked(simpleGit.simpleGit().revparse).mockResolvedValue("/Git/azure-rest-api-specs"); - const result = await new NpmPrefixRule().execute(host, TsvTestHost.folder); + const result = await new NpmPrefixRule().execute(mockFolder); assert(!result.success); }); diff --git a/eng/tools/typespec-validation/test/sdk-tspconfig-validation.test.ts b/eng/tools/typespec-validation/test/sdk-tspconfig-validation.test.ts index 0c0944d32832..1bd73e79c9e8 100644 --- a/eng/tools/typespec-validation/test/sdk-tspconfig-validation.test.ts +++ b/eng/tools/typespec-validation/test/sdk-tspconfig-validation.test.ts @@ -1,4 +1,4 @@ -import { describe, it } from "vitest"; +import { afterEach, beforeEach, describe, it, MockInstance, vi } from "vitest"; import { SdkTspConfigValidationRule, @@ -6,11 +6,13 @@ import { TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule, TspConfigTsMgmtModularPackageDirectorySubRule, TspConfigTsMgmtModularPackageNameMatchPatternSubRule, + TspConfigTsDpPackageDirectorySubRule, + TspConfigTsRlcDpPackageNameMatchPatternSubRule, TspConfigGoMgmtServiceDirMatchPatternSubRule, TspConfigGoMgmtPackageDirectorySubRule, TspConfigGoMgmtModuleEqualStringSubRule, TspConfigGoMgmtFixConstStutteringTrueSubRule, - TspConfigGoMgmtGenerateExamplesTrueSubRule, + TspConfigGoMgmtGenerateSamplesTrueSubRule, TspConfigGoMgmtHeadAsBooleanTrueSubRule, TspConfigGoAzGenerateFakesTrueSubRule, TspConfigGoAzInjectSpansTrueSubRule, @@ -19,7 +21,7 @@ import { TspConfigGoDpServiceDirMatchPatternSubRule, TspConfigJavaAzPackageDirectorySubRule, TspConfigPythonMgmtPackageDirectorySubRule, - TspConfigPythonAzPackageNameEqualStringSubRule, + TspConfigPythonMgmtNamespaceSubRule, TspConfigPythonAzGenerateTestTrueSubRule, TspConfigPythonAzGenerateSampleTrueSubRule, TspConfigCsharpAzPackageDirectorySubRule, @@ -28,12 +30,15 @@ import { TspConfigCsharpMgmtPackageDirectorySubRule, TspconfigSubRuleBase, TspConfigPythonDpPackageDirectorySubRule, + TspConfigTsMlcDpPackageNameMatchPatternSubRule, } 
from "../src/rules/sdk-tspconfig-validation.js"; -import { TsvTestHost } from "./tsv-test-host.js"; +import { contosoTspConfig } from "@azure-tools/specs-shared/test/examples"; import { join } from "path"; import { strictEqual } from "node:assert"; import { stringify } from "yaml"; +import * as utils from "../src/utils.js"; + export function createParameterExample(...pairs: { key: string; value: string | boolean | {} }[]) { const obj: Record = { parameters: {} }; for (const pair of pairs) { @@ -66,12 +71,18 @@ export function createEmitterOptionExample( return content; } +// TODO: remove when @azure-tools/typespec-csharp is ready for validating tspconfig +function shouldBeTrueOnFailSubRuleValidation(emitterName: string) { + return emitterName === "@azure-tools/typespec-csharp" ? true : false; +} + function createParameterTestCases( folder: string, key: string, validValue: boolean | string, invalidValue: boolean | string, subRules: TspconfigSubRuleBase[], + additionalOptions: Record = {}, ): Case[] { const cases: Case[] = [ { @@ -80,6 +91,7 @@ function createParameterTestCases( tspconfigContent: createParameterExample({ key: key, value: validValue }), success: true, subRules, + additionalOptions, }, { description: `Validate parameter ${key} with invalid value ${invalidValue}`, @@ -87,6 +99,7 @@ function createParameterTestCases( tspconfigContent: createParameterExample({ key: key, value: invalidValue }), success: false, subRules, + additionalOptions, }, { description: `Validate parameter ${key} with undefined value`, @@ -94,6 +107,7 @@ function createParameterTestCases( tspconfigContent: "", success: false, subRules, + additionalOptions, }, ]; return cases; @@ -107,6 +121,7 @@ function createEmitterOptionTestCases( invalidValue: boolean | string, subRules: TspconfigSubRuleBase[], allowUndefined: boolean = false, + additionalOptions: Record = {}, ): Case[] { const cases: Case[] = []; @@ -114,7 +129,11 @@ function createEmitterOptionTestCases( cases.push({ 
description: `Validate ${language}'s option:${key} with valid value ${validValue}`, folder, - tspconfigContent: createEmitterOptionExample(emitterName, { key: key, value: validValue }), + tspconfigContent: createEmitterOptionExample( + emitterName, + { key: key, value: validValue }, + ...Object.entries(additionalOptions).map(([key, value]) => ({ key, value })), + ), success: true, subRules, }); @@ -122,19 +141,26 @@ function createEmitterOptionTestCases( cases.push({ description: `Validate ${language}'s option:${key} with invalid value ${invalidValue}`, folder, - tspconfigContent: createEmitterOptionExample(emitterName, { - key: key, - value: invalidValue, - }), - success: false, + tspconfigContent: createEmitterOptionExample( + emitterName, + { + key: key, + value: invalidValue, + }, + ...Object.entries(additionalOptions).map(([key, value]) => ({ key, value })), + ), + success: shouldBeTrueOnFailSubRuleValidation(emitterName), subRules, }); cases.push({ description: `Validate ${language}'s option:${key} with undefined value`, folder, - tspconfigContent: createEmitterOptionExample(emitterName), - success: allowUndefined ? true : false, + tspconfigContent: createEmitterOptionExample( + emitterName, + ...Object.entries(additionalOptions).map(([key, value]) => ({ key, value })), + ), + success: allowUndefined ? 
true : shouldBeTrueOnFailSubRuleValidation(emitterName), subRules, }); @@ -142,11 +168,15 @@ function createEmitterOptionTestCases( cases.push({ description: `Validate ${language}'s option:${key} with incomplete key`, folder, - tspconfigContent: createEmitterOptionExample(emitterName, { - key: key.split(".").slice(0, -1).join("."), - value: validValue, - }), - success: false, + tspconfigContent: createEmitterOptionExample( + emitterName, + { + key: key.split(".").slice(0, -1).join("."), + value: validValue, + }, + ...Object.entries(additionalOptions).map(([key, value]) => ({ key, value })), + ), + success: shouldBeTrueOnFailSubRuleValidation(emitterName), subRules, }); } @@ -160,6 +190,7 @@ interface Case { tspconfigContent: string; success: boolean; ignoredKeyPaths?: string[]; + additionalOptions?: Record; } const managementTspconfigFolder = "contosowidgetmanager/Contoso.Management/"; @@ -173,15 +204,6 @@ const commonAzureServiceDirTestCases = createParameterTestCases( ); const tsManagementExperimentalExtensibleEnumsTestCases = createEmitterOptionTestCases( - "@azure-tools/typespec-ts", - managementTspconfigFolder, - "experimentalExtensibleEnums", - true, - false, - [new TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule()], -); - -const newTsManagementExperimentalExtensibleEnumsTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-ts", managementTspconfigFolder, "experimental-extensible-enums", @@ -190,18 +212,6 @@ const newTsManagementExperimentalExtensibleEnumsTestCases = createEmitterOptionT [new TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule()], ); -const mixTsManagementExperimentalExtensibleEnumsTestCases = { - description: `Validate @azure-tools/typespec-ts's mix options: experimental-extensible-enums/experimentalExtensibleEnums with different values`, - folder: "aaa.Management", - tspconfigContent: createEmitterOptionExample( - "@azure-tools/typespec-ts", - { key: "experimentalExtensibleEnums", value: true }, - { key: 
"experimental-extensible-enums", value: false }, - ), - success: false, - subRules: [new TspConfigTsMgmtModularExperimentalExtensibleEnumsTrueSubRule()], -}; - const tsManagementPackageDirTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-ts", managementTspconfigFolder, @@ -214,32 +224,40 @@ const tsManagementPackageDirTestCases = createEmitterOptionTestCases( const tsManagementPackageNameTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-ts", managementTspconfigFolder, - "packageDetails.name", + "package-details.name", "@azure/arm-aaa-bbb", "@azure/aaa-bbb", [new TspConfigTsMgmtModularPackageNameMatchPatternSubRule()], ); -const newTsManagementPackageNameTestCases = createEmitterOptionTestCases( +const tsDpPackageDirTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-ts", - managementTspconfigFolder, + "", + "package-dir", + "arm-aaa-rest", + "aaa--rest", + [new TspConfigTsDpPackageDirectorySubRule()], +); + +const tsDpPackageNameTestCases = createEmitterOptionTestCases( + "@azure-tools/typespec-ts", + "", "package-details.name", - "@azure/arm-aaa-bbb", + "@azure-rest/aaa-bbb", "@azure/aaa-bbb", - [new TspConfigTsMgmtModularPackageNameMatchPatternSubRule()], + [new TspConfigTsRlcDpPackageNameMatchPatternSubRule()], ); -const mixTsManagementPackageNameTestCases = { - description: `Validate @azure-tools/typespec-ts's mix options: package-details/packageDetails with different values`, - folder: "aaa.Management", - tspconfigContent: createEmitterOptionExample( - "@azure-tools/typespec-ts", - { key: "packageDetails.name", value: "@azure/arm-aaa-bbb" }, - { key: "package-details.name", value: "@azure/aaa-bbb" }, - ), - success: false, - subRules: [new TspConfigTsMgmtModularPackageNameMatchPatternSubRule()], -}; +const tsDpModularPackageNameTestCases = createEmitterOptionTestCases( + "@azure-tools/typespec-ts", + "", + "package-details.name", + "@azure/aaa-bbb", + "azure/aaa-bbb", + [new 
TspConfigTsMlcDpPackageNameMatchPatternSubRule()], + false, + { "is-modular-library": true }, // Additional option added +); const goManagementServiceDirTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-go", @@ -280,10 +298,10 @@ const goManagementFixConstStutteringTestCases = createEmitterOptionTestCases( const goManagementGenerateExamplesTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-go", managementTspconfigFolder, - "generate-examples", + "generate-samples", true, false, - [new TspConfigGoMgmtGenerateExamplesTrueSubRule()], + [new TspConfigGoMgmtGenerateSamplesTrueSubRule()], ); const goManagementGenerateFakesTestCases = createEmitterOptionTestCases( @@ -377,13 +395,13 @@ const pythonManagementPackageDirTestCases = createEmitterOptionTestCases( [new TspConfigPythonMgmtPackageDirectorySubRule()], ); -const pythonManagementPackageNameTestCases = createEmitterOptionTestCases( +const pythonManagementNamespaceTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-python", managementTspconfigFolder, - "package-name", - "{package-dir}", - "aaa", - [new TspConfigPythonAzPackageNameEqualStringSubRule()], + "namespace", + "azure.mgmt.aaa", + "azure-aaa", + [new TspConfigPythonMgmtNamespaceSubRule()], ); const pythonManagementGenerateTestTestCases = createEmitterOptionTestCases( @@ -413,15 +431,6 @@ const pythonDpPackageDirTestCases = createEmitterOptionTestCases( [new TspConfigPythonDpPackageDirectorySubRule()], ); -const pythonAzPackageNameTestCases = createEmitterOptionTestCases( - "@azure-tools/typespec-python", - "", - "package-name", - "{package-dir}", - "aaa", - [new TspConfigPythonAzPackageNameEqualStringSubRule()], -); - const pythonAzGenerateTestTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-python", "", @@ -458,6 +467,42 @@ const csharpAzNamespaceTestCases = createEmitterOptionTestCases( [new TspConfigCsharpAzNamespaceEqualStringSubRule()], ); +const csharpAzNamespaceWithPackageDirTestCases: Case[] = [ 
+ { + description: `Validate csharp\'s option: namespace is equal to {package-dir} and package-dir exists`, + folder: "", + tspconfigContent: createEmitterOptionExample( + "@azure-tools/typespec-csharp", + { key: "namespace", value: "{package-dir}" }, + { key: "package-dir", value: "Azure.AAA" }, + ), + success: true, + subRules: [new TspConfigCsharpAzNamespaceEqualStringSubRule()], + }, + { + description: `Validate csharp\'s option: namespace is equal to package-dir`, + folder: "", + tspconfigContent: createEmitterOptionExample( + "@azure-tools/typespec-csharp", + { key: "namespace", value: "Azure.AAA" }, + { key: "package-dir", value: "Azure.AAA" }, + ), + success: true, + subRules: [new TspConfigCsharpAzNamespaceEqualStringSubRule()], + }, + { + description: `Validate csharp\'s option: namespace is not equal to package-dir`, + folder: "", + tspconfigContent: createEmitterOptionExample( + "@azure-tools/typespec-csharp", + { key: "namespace", value: "namespace" }, + { key: "package-dir", value: "Azure.AAA" }, + ), + success: shouldBeTrueOnFailSubRuleValidation("@azure-tools/typespec-csharp"), + subRules: [new TspConfigCsharpAzNamespaceEqualStringSubRule()], + }, +]; + const csharpAzClearOutputFolderTestCases = createEmitterOptionTestCases( "@azure-tools/typespec-csharp", "", @@ -476,10 +521,22 @@ const csharpMgmtPackageDirTestCases = createEmitterOptionTestCases( [new TspConfigCsharpMgmtPackageDirectorySubRule()], ); +const suppressEntireRuleTestCase: Case = { + description: "Suppress entire rule", + folder: managementTspconfigFolder, + subRules: [new TspConfigCommonAzServiceDirMatchPatternSubRule()], + tspconfigContent: ` +parameters: +service-dir-x: "" +`, + success: true, + ignoredKeyPaths: [], +}; + const suppressSubRuleTestCases: Case[] = [ { description: "Suppress parameter", - folder: "", + folder: managementTspconfigFolder, subRules: [new TspConfigCommonAzServiceDirMatchPatternSubRule()], tspconfigContent: ` parameters: @@ -500,20 +557,50 @@ options: 
success: true, ignoredKeyPaths: ["options.@azure-tools/typespec-ts.package-dir"], }, + { + description: "Suppress option with wildcard at the end", + folder: managementTspconfigFolder, + subRules: [ + new TspConfigGoMgmtPackageDirectorySubRule(), + new TspConfigGoMgmtModuleEqualStringSubRule(), + new TspConfigGoMgmtFixConstStutteringTrueSubRule(), + ], + tspconfigContent: ` +options: + "@azure-tools/typespec-go": + package-dir: "wrong/directory" + module-name: "invalid-module" + generate-consts: false +`, + success: true, + ignoredKeyPaths: ["options.@azure-tools/typespec-go.*"], + }, ]; describe("tspconfig", function () { + let fileExistsSpy: MockInstance; + let readTspConfigSpy: MockInstance; + + beforeEach(() => { + fileExistsSpy = vi.spyOn(utils, "fileExists").mockResolvedValue(true); + readTspConfigSpy = vi.spyOn(utils, "readTspConfig").mockResolvedValue(contosoTspConfig); + }); + + afterEach(() => { + fileExistsSpy.mockReset(); + readTspConfigSpy.mockReset(); + }); + it.each([ // common ...commonAzureServiceDirTestCases, // ts - ...newTsManagementExperimentalExtensibleEnumsTestCases, ...tsManagementExperimentalExtensibleEnumsTestCases, ...tsManagementPackageDirTestCases, - ...newTsManagementPackageNameTestCases, ...tsManagementPackageNameTestCases, - mixTsManagementExperimentalExtensibleEnumsTestCases, - mixTsManagementPackageNameTestCases, + ...tsDpPackageDirTestCases, + ...tsDpPackageNameTestCases, + ...tsDpModularPackageNameTestCases, // go ...goManagementServiceDirTestCases, ...goManagementPackageDirTestCases, @@ -532,11 +619,10 @@ describe("tspconfig", function () { ...javaManagementPackageDirTestCases, // python ...pythonManagementPackageDirTestCases, - ...pythonManagementPackageNameTestCases, + ...pythonManagementNamespaceTestCases, ...pythonManagementGenerateTestTestCases, ...pythonManagementGenerateSampleTestCases, ...pythonDpPackageDirTestCases, - ...pythonAzPackageNameTestCases, ...pythonAzGenerateTestTestCases, ...pythonAzGenerateSampleTestCases, 
// csharp @@ -544,15 +630,35 @@ describe("tspconfig", function () { ...csharpAzNamespaceTestCases, ...csharpAzClearOutputFolderTestCases, ...csharpMgmtPackageDirTestCases, - // suppression - ...suppressSubRuleTestCases, + ...csharpAzNamespaceWithPackageDirTestCases, ])(`$description`, async (c: Case) => { - let host = new TsvTestHost(); - host.checkFileExists = async (file: string) => { + readTspConfigSpy.mockImplementation(async (_folder: string) => c.tspconfigContent); + vi.spyOn(utils, "getSuppressions").mockImplementation(async (_path: string) => [ + { + tool: "TypeSpecValidation", + paths: ["tspconfig.yaml"], + reason: "Test reason", + rules: ["NOT-SdkTspConfigValidation"], + subRules: c.ignoredKeyPaths, + }, + ]); + + fileExistsSpy.mockImplementation(async (file: string) => { return file === join(c.folder, "tspconfig.yaml"); - }; - host.readTspConfig = async (_folder: string) => c.tspconfigContent; - host.getSuppressions = async (_path: string) => [ + }); + + const rule = new SdkTspConfigValidationRule(c.subRules); + const result = await rule.execute(c.folder); + strictEqual(result.success, c.success); + if (c.success) + strictEqual(result.stdOutput?.includes("[SdkTspConfigValidation]: validation passed."), true); + if (!c.success) + strictEqual(result.stdOutput?.includes("[SdkTspConfigValidation]: validation failed."), true); + }); + + it.each([...suppressSubRuleTestCases])(`$description`, async (c: Case) => { + readTspConfigSpy.mockImplementation(async (_folder: string) => c.tspconfigContent); + vi.spyOn(utils, "getSuppressions").mockImplementation(async (_path: string) => [ { tool: "TypeSpecValidation", paths: ["tspconfig.yaml"], @@ -560,13 +666,107 @@ describe("tspconfig", function () { rules: ["SdkTspConfigValidation"], subRules: c.ignoredKeyPaths, }, - ]; + ]); + + fileExistsSpy.mockImplementation(async (file: string) => { + return file === join(c.folder, "tspconfig.yaml"); + }); + const rule = new SdkTspConfigValidationRule(c.subRules); - const result 
= await rule.execute(host, c.folder); - strictEqual(result.success, true); + const result = await rule.execute(c.folder); + const returnSuccess = c.folder.includes(".Management") ? c.success : true; + strictEqual(result.success, returnSuccess); if (c.success) strictEqual(result.stdOutput?.includes("[SdkTspConfigValidation]: validation passed."), true); if (!c.success) strictEqual(result.stdOutput?.includes("[SdkTspConfigValidation]: validation failed."), true); }); + + it.each([suppressEntireRuleTestCase])(`$description`, async (c: Case) => { + readTspConfigSpy.mockImplementation(async (_folder: string) => c.tspconfigContent); + vi.spyOn(utils, "getSuppressions").mockImplementation(async (_path: string) => [ + { + tool: "TypeSpecValidation", + paths: ["tspconfig.yaml"], + reason: "Test reason", + rules: ["SdkTspConfigValidation"], + }, + ]); + + fileExistsSpy.mockImplementation(async (file: string) => { + return file === join(c.folder, "tspconfig.yaml"); + }); + + const rule = new SdkTspConfigValidationRule(c.subRules); + const result = await rule.execute(c.folder); + strictEqual(result.success, true); + strictEqual(result.stdOutput?.includes("[SdkTspConfigValidation]: validation skipped."), true); + }); + + it("Tests wildcard suppression for multiple AWS connector services", async () => { + // List of AWS connector service paths to test + const awsServiceFolders = [ + "awsconnector/AccessAnalyzerAnalyzer.Management", + "awsconnector/AcmCertificateSummary.Management", + "awsconnector/ApiGatewayRestApi.Management", + "awsconnector/ApiGatewayStage.Management", + "awsconnector/AppSyncGraphqlApi.Management", + "awsconnector/AutoScalingAutoScalingGroup.Management", + "awsconnector/Awsconnector.Management", + ]; + + // Mock suppressions.yaml containing a wildcard path for awsconnector/*/tspconfig.yaml + const suppressionsSpy = vi + .spyOn(utils, "getSuppressions") + .mockImplementation(async (_path: string) => [ + { + tool: "TypeSpecValidation", + paths: 
["awsconnector/*/tspconfig.yaml"], // Single wildcard pattern to match all paths + reason: "AWS Connector services have special requirements", + rules: ["SdkTspConfigValidation"], + subRules: ["parameters.service-dir.default"], + }, + ]); + + // Test each AWS connector service path + for (const awsServiceFolder of awsServiceFolders) { + // Reset mocks for each service + suppressionsSpy.mockClear(); + + // Mock configuration content + const tspconfigContent = ` +parameters: + service-dir: "${awsServiceFolder}" +`; + + // Setup mocks + readTspConfigSpy.mockImplementation(async () => tspconfigContent); + fileExistsSpy.mockImplementation(async (file: string) => { + return file === join(awsServiceFolder, "tspconfig.yaml"); + }); + + // Create validation rule and execute + const rule = new SdkTspConfigValidationRule([ + new TspConfigCommonAzServiceDirMatchPatternSubRule(), + ]); + const result = await rule.execute(awsServiceFolder); + + // Validate that validation passes for each service + strictEqual(result.success, true, `Validation should pass for ${awsServiceFolder}`); + strictEqual( + result.stdOutput?.includes("[SdkTspConfigValidation]: validation passed."), + true, + `Output should indicate validation passed for ${awsServiceFolder}`, + ); + + // Verify suppressions were called with the correct path + strictEqual( + suppressionsSpy.mock.calls.some( + (call) => call[0] === join(awsServiceFolder, "tspconfig.yaml"), + ), + true, + `getSuppressions should be called with path ${join(awsServiceFolder, "tspconfig.yaml")}`, + ); + } + }); }); diff --git a/eng/tools/typespec-validation/test/tsv-test-host.ts b/eng/tools/typespec-validation/test/tsv-test-host.ts deleted file mode 100644 index 29c04c2b3c31..000000000000 --- a/eng/tools/typespec-validation/test/tsv-test-host.ts +++ /dev/null @@ -1,94 +0,0 @@ -import defaultPath, { PlatformPath } from "path"; -import { Suppression } from "suppressions"; -import { RuleResult } from "../src/rule-result.js"; -import { 
IGitOperation, TsvHost } from "../src/tsv-host.js"; -import { normalizePath } from "../src/utils.js"; - -export { IGitOperation } from "../src/tsv-host.js"; - -export class TsvTestHost implements TsvHost { - path: PlatformPath; - - constructor(path: PlatformPath = defaultPath) { - this.path = path; - } - - static get folder() { - return "specification/foo/Foo"; - } - - gitOperation(_folder: string): IGitOperation { - return { - status: () => { - return Promise.resolve({ - modified: [], - not_added: [], - isClean: () => true, - }); - }, - diff: () => { - return Promise.resolve(""); - }, - revparse: () => { - return Promise.resolve(""); - }, - }; - } - - async runCmd(cmd: string, cwd: string): Promise<[Error | null, string, string]> { - let err = null; - let stdout = `default ${cmd} at ${cwd}`; - let stderr = ""; - - return [err, stdout, stderr]; - } - - async checkFileExists(_file: string): Promise { - return true; - } - - async isDirectory(_path: string): Promise { - return true; - } - - normalizePath(folder: string): string { - return normalizePath(folder, this.path); - } - - async gitDiffTopSpecFolder(host: TsvHost, folder: string): Promise { - let success = true; - let stdout = `Running git diff on folder ${folder}, running default cmd ${host.runCmd("", "")}`; - let stderr = ""; - - return { - success: success, - stdOutput: stdout, - errorOutput: stderr, - }; - } - - async readTspConfig(_folder: string): Promise { - // Sample config that should cause all rules to succeed - return ` -emit: - - "@azure-tools/typespec-autorest" -linter: - extends: - - "@azure-tools/typespec-azure-rulesets/data-plane" -options: - "@azure-tools/typespec-autorest": - azure-resource-provider-folder: "data-plane" - emitter-output-dir: "{project-root}/.." 
- examples-directory: "examples" - output-file: "{azure-resource-provider-folder}/{service-name}/{version-status}/{version}/openapi.json" -`; - } - - async globby(patterns: string[]): Promise { - return Promise.resolve(patterns); - } - - async getSuppressions(_path: string): Promise { - return Promise.resolve([]); - } -} diff --git a/eng/tools/typespec-validation/test/util.test.ts b/eng/tools/typespec-validation/test/util.test.ts index 1774ccf45e3a..93ccbb355d0f 100644 --- a/eng/tools/typespec-validation/test/util.test.ts +++ b/eng/tools/typespec-validation/test/util.test.ts @@ -1,9 +1,11 @@ -import { describe, it } from "vitest"; -import { gitDiffTopSpecFolder, normalizePath } from "../src/utils.js"; +import { mockFolder, mockSimpleGit } from "./mocks.js"; +mockSimpleGit(); + import { strict as assert } from "node:assert"; -import process from "process"; import path from "path"; -import { TsvTestHost } from "./tsv-test-host.js"; +import process from "process"; +import { describe, it } from "vitest"; +import { gitDiffTopSpecFolder, normalizePath } from "../src/utils.js"; describe("util", function () { describe("normalize", function () { @@ -32,7 +34,7 @@ describe("util", function () { }); describe("gitDiff", function () { it("should succeed if git diff produces no output", async function () { - const result = await gitDiffTopSpecFolder(new TsvTestHost(), TsvTestHost.folder); + const result = await gitDiffTopSpecFolder(mockFolder); assert(result.success); }); }); diff --git a/eng/tools/typespec-validation/tsconfig.json b/eng/tools/typespec-validation/tsconfig.json index c1eaa0805646..512241b97047 100644 --- a/eng/tools/typespec-validation/tsconfig.json +++ b/eng/tools/typespec-validation/tsconfig.json @@ -2,8 +2,9 @@ "extends": "../tsconfig.json", "compilerOptions": { "outDir": "./dist", + "rootDir": ".", + "allowJs": true, }, - "references": [ - { "path": "../suppressions" } - ] + "include": ["*.ts", "src/**/*.ts", "test/**/*.ts"], + "references": [{ "path": 
"../suppressions" }], } diff --git a/package-lock.json b/package-lock.json index 1c6041232ed4..d20bc32dc443 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6,31 +6,32 @@ "": { "name": "azure-rest-api-specs", "devDependencies": { - "@autorest/openapi-to-typespec": "0.10.13", - "@azure-tools/spec-gen-sdk": "^0.3.2", - "@azure-tools/typespec-apiview": "0.6.0", - "@azure-tools/typespec-autorest": "0.53.0", - "@azure-tools/typespec-azure-core": "0.53.0", - "@azure-tools/typespec-azure-portal-core": "0.53.0", - "@azure-tools/typespec-azure-resource-manager": "0.53.0", - "@azure-tools/typespec-azure-rulesets": "0.53.0", - "@azure-tools/typespec-client-generator-cli": "0.16.0", - "@azure-tools/typespec-client-generator-core": "0.53.1", + "@autorest/openapi-to-typespec": "0.11.2", + "@azure-tools/spec-gen-sdk": "~0.8.0", + "@azure-tools/specs-shared": "file:.github/shared", + "@azure-tools/typespec-apiview": "0.7.2", + "@azure-tools/typespec-autorest": "0.57.1", + "@azure-tools/typespec-azure-core": "0.57.0", + "@azure-tools/typespec-azure-portal-core": "0.57.0", + "@azure-tools/typespec-azure-resource-manager": "0.57.2", + "@azure-tools/typespec-azure-rulesets": "0.57.1", + "@azure-tools/typespec-client-generator-cli": "0.23.0", + "@azure-tools/typespec-client-generator-core": "0.57.3", "@azure-tools/typespec-liftr-base": "0.8.0", "@azure/avocado": "^0.9.1", - "@typespec/compiler": "0.67.2", - "@typespec/events": "0.67.1", - "@typespec/http": "0.67.1", - "@typespec/openapi": "0.67.1", - "@typespec/openapi3": "0.67.1", - "@typespec/prettier-plugin-typespec": "0.67.1", - "@typespec/rest": "0.67.1", - "@typespec/sse": "0.67.1", - "@typespec/streams": "0.67.1", - "@typespec/versioning": "0.67.1", - "@typespec/xml": "0.67.1", + "@typespec/compiler": "1.1.0", + "@typespec/events": "0.71.0", + "@typespec/http": "1.1.0", + "@typespec/openapi": "1.1.0", + "@typespec/openapi3": "1.1.0", + "@typespec/prettier-plugin-typespec": "1.1.0", + "@typespec/rest": "0.71.0", + 
"@typespec/sse": "0.71.0", + "@typespec/streams": "0.71.0", + "@typespec/versioning": "0.71.0", + "@typespec/xml": "0.71.0", "azure-rest-api-specs-eng-tools": "file:eng/tools", - "oav": "^3.5.1", + "oav": "^3.6.1", "prettier": "~3.5.3", "typescript": "~5.8.2" }, @@ -39,17 +40,61 @@ "npm": ">=10.0.0" } }, + ".github/shared": { + "name": "@azure-tools/specs-shared", + "dev": true, + "dependencies": { + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "debug": "^4.4.0", + "js-yaml": "^4.1.0", + "marked": "^16.0.0", + "simple-git": "^3.27.0" + }, + "bin": { + "spec-model": "cmd/spec-model.js" + }, + "devDependencies": { + "@eslint/js": "^9.22.0", + "@tsconfig/node20": "^20.1.4", + "@types/debug": "^4.1.12", + "@types/js-yaml": "^4.0.9", + "@types/node": "^20.0.0", + "@vitest/coverage-v8": "^3.0.7", + "cross-env": "^7.0.3", + "eslint": "^9.22.0", + "globals": "^16.0.0", + "prettier": "~3.5.3", + "semver": "^7.7.1", + "typescript": "~5.8.2", + "vitest": "^3.0.7" + } + }, + ".github/shared/node_modules/marked": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.0.0.tgz", + "integrity": "sha512-MUKMXDjsD/eptB7GPzxo4xcnLS6oo7/RHimUMHEDRhUooPwmN9BEpMl7AEOJv3bmso169wHI2wUF9VQgL7zfmA==", + "dev": true, + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, "eng/tools": { "name": "azure-rest-api-specs-eng-tools", "dev": true, "hasInstallScript": true, "devDependencies": { "@azure-tools/lint-diff": "file:lint-diff", + "@azure-tools/oav-runner": "file:oav-runner", + "@azure-tools/openapi-diff-runner": "file:openapi-diff-runner", "@azure-tools/sdk-suppressions": "file:sdk-suppressions", "@azure-tools/spec-gen-sdk-runner": "file:spec-gen-sdk-runner", - "@azure-tools/specs-model": "file:specs-model", "@azure-tools/suppressions": "file:suppressions", "@azure-tools/tsp-client-tests": "file:tsp-client-tests", + "@azure-tools/typespec-migration-validation": "file:typespec-migration-validation", 
"@azure-tools/typespec-requirement": "file:typespec-requirement", "@azure-tools/typespec-validation": "file:typespec-validation" } @@ -58,143 +103,42 @@ "name": "@azure-tools/lint-diff", "dev": true, "dependencies": { - "@apidevtools/json-schema-ref-parser": "^9.0.9", - "@azure-tools/openapi-tools-common": "^1.2.2", - "@azure/openapi-markdown": "^0.9.4", - "@microsoft.azure/openapi-validator": "2.2.4", - "@types/js-yaml": "^3.12.10", - "autorest": "3.6.1", + "@apidevtools/json-schema-ref-parser": "^14.0.1", + "@azure-tools/specs-shared": "file:../../../.github/shared", + "@microsoft.azure/openapi-validator": "^2.2.4", + "autorest": "^3.7.2", "axios": "^1.8.3", "change-case": "^5.4.4", - "commonmark": "^0.31.2", - "js-yaml": "^3.14.1" + "deep-eql": "^5.0.2", + "marked": "^16.0.0" }, "bin": { "lint-diff": "cmd/lint-diff.js" }, "devDependencies": { + "@types/deep-eql": "^4.0.2", "@types/node": "^18.19.31", "@vitest/coverage-v8": "^3.0.2", "execa": "^9.5.2", "memfs": "^4.17.0", - "typescript": "~5.6.2", + "prettier": "~3.5.3", + "typescript": "~5.8.2", "vitest": "^3.0.2" }, "engines": { "node": ">= 20.0.0" } }, - "eng/tools/lint-diff/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "eng/tools/lint-diff/node_modules/typescript": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", - "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - 
"eng/tools/node_modules/@apidevtools/json-schema-ref-parser": { - "version": "9.1.2", - "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.2.tgz", - "integrity": "sha512-r1w81DpR+KyRWd3f+rk6TNqMgedmAxZP5v5KWlXQWlgMUUtyEJch0DKEci1SorPMiSeM8XPl7MZ3miJ60JIpQg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jsdevtools/ono": "^7.1.3", - "@types/json-schema": "^7.0.6", - "call-me-maybe": "^1.0.1", - "js-yaml": "^4.1.0" - } - }, - "eng/tools/node_modules/@types/node": { - "version": "18.19.83", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.83.tgz", - "integrity": "sha512-D69JeR5SfFS5H6FLbUaS0vE4r1dGhmMBbG4Ed6BNS4wkDK8GZjsdCShT5LCN59vOHEUHnFCY9J4aclXlIphMkA==", + "eng/tools/lint-diff/node_modules/@types/node": { + "version": "18.19.112", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.112.tgz", + "integrity": "sha512-i+Vukt9POdS/MBI7YrrkkI5fMfwFtOjphSmt4WXYLfwqsfr6z/HdCx7LqT9M7JktGob8WNgj8nFB4TbGNE4Cog==", "dev": true, "license": "MIT", "dependencies": { "undici-types": "~5.26.4" } }, - "eng/tools/node_modules/@vitest/coverage-v8": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.9.tgz", - "integrity": "sha512-15OACZcBtQ34keIEn19JYTVuMFTlFrClclwWjHo/IRPg/8ELpkgNTl0o7WLP9WO9XGH6+tip9CPYtEOrIDJvBA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.3.0", - "@bcoe/v8-coverage": "^1.0.2", - "debug": "^4.4.0", - "istanbul-lib-coverage": "^3.2.2", - "istanbul-lib-report": "^3.0.1", - "istanbul-lib-source-maps": "^5.0.6", - "istanbul-reports": "^3.1.7", - "magic-string": "^0.30.17", - "magicast": "^0.3.5", - "std-env": "^3.8.0", - "test-exclude": "^7.0.1", - "tinyrainbow": "^2.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@vitest/browser": "3.0.9", - "vitest": "3.0.9" - }, - "peerDependenciesMeta": { - "@vitest/browser": { - 
"optional": true - } - } - }, - "eng/tools/node_modules/@vitest/mocker": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.9.tgz", - "integrity": "sha512-ryERPIBOnvevAkTq+L1lD+DTFBRcjueL9lOUfXsLfwP92h4e+Heb+PjiqS3/OURWPtywfafK0kj++yDFjWUmrA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/spy": "3.0.9", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.17" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0" - }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } - } - }, "eng/tools/node_modules/ajv": { "version": "8.17.1", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", @@ -212,60 +156,53 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "eng/tools/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "eng/tools/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, "license": "MIT", - "dependencies": { - "sprintf-js": "~1.0.2" + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, - "eng/tools/node_modules/autorest": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/autorest/-/autorest-3.6.1.tgz", - "integrity": "sha512-tTOnfQq+LAyqnxFrOOnyCEaErXnjRTgduUN7a8LUv2u5deqDlI0zoJllHeIEYDZS2o2Kr1s8pDj2NxaFPOWldg==", + "eng/tools/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + 
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, - "hasInstallScript": true, "license": "MIT", - "bin": { - "autorest": "entrypoints/app.js" - }, "engines": { - "node": ">=12.0.0" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "eng/tools/node_modules/commonmark": { - "version": "0.31.2", - "resolved": "https://registry.npmjs.org/commonmark/-/commonmark-0.31.2.tgz", - "integrity": "sha512-2fRLTyb9r/2835k5cwcAwOj0DEc44FARnMp5veGsJ+mEAZdi52sNopLu07ZyElQUz058H43whzlERDIaaSw4rg==", + "eng/tools/node_modules/cliui": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz", + "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==", "dev": true, - "license": "BSD-2-Clause", + "license": "ISC", "dependencies": { - "entities": "~3.0.1", - "mdurl": "~1.0.1", - "minimist": "~1.2.8" - }, - "bin": { - "commonmark": "bin/commonmark" + "string-width": "^7.2.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" }, "engines": { - "node": "*" + "node": ">=20" } }, - "eng/tools/node_modules/entities": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", - "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==", + "eng/tools/node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } + "license": "MIT" }, "eng/tools/node_modules/json-schema-traverse": { "version": "1.0.0", @@ -274,14 +211,27 @@ "dev": true, 
"license": "MIT" }, + "eng/tools/node_modules/marked": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.0.0.tgz", + "integrity": "sha512-MUKMXDjsD/eptB7GPzxo4xcnLS6oo7/RHimUMHEDRhUooPwmN9BEpMl7AEOJv3bmso169wHI2wUF9VQgL7zfmA==", + "dev": true, + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, "eng/tools/node_modules/minimatch": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", - "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz", + "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^2.0.1" + "@isaacs/brace-expansion": "^5.0.0" }, "engines": { "node": "20 || >=22" @@ -290,6 +240,40 @@ "url": "https://github.com/sponsors/isaacs" } }, + "eng/tools/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "eng/tools/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" 
+ }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "eng/tools/node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", @@ -297,164 +281,67 @@ "dev": true, "license": "MIT" }, - "eng/tools/node_modules/vite": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz", - "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==", + "eng/tools/node_modules/wrap-ansi": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.25.0", - "postcss": "^8.5.3", - "rollup": "^4.30.1" - }, - "bin": { - "vite": "bin/vite.js" + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": ">=18" }, "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "jiti": ">=1.21.0", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } + "url": 
"https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "eng/tools/node_modules/vitest": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", - "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", + "eng/tools/node_modules/yargs": { + "version": "18.0.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz", + "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.9", - "@vitest/mocker": "3.0.9", - "@vitest/pretty-format": "^3.0.9", - "@vitest/runner": "3.0.9", - "@vitest/snapshot": "3.0.9", - "@vitest/spy": "3.0.9", - "@vitest/utils": "3.0.9", - "chai": "^5.2.0", - "debug": "^4.4.0", - "expect-type": "^1.1.0", - "magic-string": "^0.30.17", - "pathe": "^2.0.3", - "std-env": "^3.8.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.2", - "tinypool": "^1.0.2", - "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0", - "vite-node": "3.0.9", - "why-is-node-running": "^2.3.0" - }, - "bin": { - "vitest": "vitest.mjs" + "cliui": "^9.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "string-width": "^7.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^22.0.0" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@types/debug": "^4.1.12", - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.9", - "@vitest/ui": "3.0.9", - "happy-dom": "*", - "jsdom": "*" - }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@types/debug": { - "optional": true - }, - "@types/node": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - 
"optional": true - } + "node": "^20.19.0 || ^22.12.0 || >=23" } }, - "eng/tools/sdk-suppressions": { - "name": "@azure-tools/sdk-suppressions", - "version": "1.0.0", + "eng/tools/node_modules/yargs-parser": { + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz", + "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=23" + } + }, + "eng/tools/oav-runner": { + "name": "@azure-tools/oav-runner", "dev": true, "dependencies": { - "ajv": "^8.17.1", - "lodash": "^4.17.20", - "yaml": "^2.4.2" + "@azure-tools/specs-shared": "file:../../../.github/shared", + "js-yaml": "^4.1.0", + "oav": "^3.5.1", + "simple-git": "^3.27.0" }, "bin": { - "get-sdk-suppressions-label": "cmd/sdk-suppressions-label.js" + "oav-runner": "cmd/oav-runner.js" }, "devDependencies": { - "@types/lodash": "^4.14.161", "@types/node": "^20.0.0", - "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, @@ -462,33 +349,20 @@ "node": ">=20.0.0" } }, - "eng/tools/sdk-suppressions/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", + "eng/tools/openapi-diff-runner": { + "name": "@azure-tools/openapi-diff-runner", "dev": true, - "license": "MIT", "dependencies": { - "undici-types": "~6.19.2" - } - }, - "eng/tools/sdk-suppressions/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, - "eng/tools/spec-gen-sdk-runner": { - "name": 
"@azure-tools/spec-gen-sdk-runner", - "version": "0.0.1", - "dev": true, + "@azure-tools/specs-shared": "file:../../../.github/shared", + "@azure/oad": "0.10.14" + }, "bin": { - "spec-gen-sdk-runner": "cmd/spec-gen-sdk-runner.js" + "openapi-diff-runner": "cmd/openapi-diff-runner.js" }, "devDependencies": { "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, @@ -496,36 +370,47 @@ "node": ">=20.0.0" } }, - "eng/tools/spec-gen-sdk-runner/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", + "eng/tools/sdk-suppressions": { + "name": "@azure-tools/sdk-suppressions", + "version": "1.0.0", "dev": true, - "license": "MIT", "dependencies": { - "undici-types": "~6.19.2" + "@azure-tools/specs-shared": "file:../../../.github/shared", + "ajv": "^8.17.1", + "debug": "^4.4.0", + "lodash": "^4.17.20", + "simple-git": "^3.27.0", + "yaml": "^2.4.2" + }, + "bin": { + "get-sdk-suppressions-label": "cmd/sdk-suppressions-label.js" + }, + "devDependencies": { + "@types/debug": "^4.1.12", + "@types/lodash": "^4.14.161", + "@types/node": "^20.0.0", + "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", + "typescript": "~5.8.2", + "vitest": "^3.0.7" + }, + "engines": { + "node": ">=20.0.0" } }, - "eng/tools/spec-gen-sdk-runner/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, - "eng/tools/specs-model": { - "name": "@azure-tools/specs-model", + "eng/tools/spec-gen-sdk-runner": { + "name": "@azure-tools/spec-gen-sdk-runner", + "version": "0.0.1", "dev": true, "bin": { - "get-specs-model": 
"cmd/get-specs-model.js" + "spec-gen-sdk-runner": "cmd/spec-gen-sdk-runner.js" }, "devDependencies": { "@eslint/js": "^9.21.0", - "@tsconfig/strictest": "^2.0.5", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", "eslint": "^9.21.0", - "eslint-plugin-unicorn": "^58.0.0", + "eslint-plugin-unicorn": "^59.0.0", "prettier": "~3.5.3", "typescript": "~5.8.2", "typescript-eslint": "^8.26.0", @@ -535,23 +420,6 @@ "node": ">=20.0.0" } }, - "eng/tools/specs-model/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "eng/tools/specs-model/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, "eng/tools/suppressions": { "name": "@azure-tools/suppressions", "dev": true, @@ -567,6 +435,7 @@ "devDependencies": { "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, @@ -574,29 +443,13 @@ "node": ">=20.0.0" } }, - "eng/tools/suppressions/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "eng/tools/suppressions/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": 
"sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, "eng/tools/tsp-client-tests": { "name": "@azure-tools/tsp-client-tests", "dev": true, "devDependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", "@types/node": "^20.0.0", - "execa": "^9.3.0", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, @@ -604,22 +457,40 @@ "node": ">=20.0.0" } }, - "eng/tools/tsp-client-tests/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", + "eng/tools/typespec-migration-validation": { + "name": "@azure-tools/typespec-migration-validation", "dev": true, - "license": "MIT", "dependencies": { - "undici-types": "~6.19.2" + "@azure-tools/typespec-autorest": ">=0.44.0 <1.0.0", + "json-diff": "^1.0.6", + "yargs": "^18.0.0" + }, + "bin": { + "tsmv": "cmd/tsmv.js" + }, + "devDependencies": { + "@types/json-diff": "^1.0.3", + "@types/node": "^18.19.86", + "@types/yargs": "^17.0.33", + "@typescript-eslint/eslint-plugin": "^8.32.1", + "@typescript-eslint/parser": "^8.32.1", + "eslint": "^9.26.0", + "prettier": "~3.5.3", + "typescript": "^5.8.3" + }, + "engines": { + "node": ">=20.0.0" } }, - "eng/tools/tsp-client-tests/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "eng/tools/typespec-migration-validation/node_modules/@types/node": { + "version": "18.19.112", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.112.tgz", + "integrity": "sha512-i+Vukt9POdS/MBI7YrrkkI5fMfwFtOjphSmt4WXYLfwqsfr6z/HdCx7LqT9M7JktGob8WNgj8nFB4TbGNE4Cog==", "dev": true, - "license": "MIT" + 
"license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } }, "eng/tools/typespec-requirement": { "name": "@azure-tools/typespec-requirement", @@ -627,6 +498,7 @@ "devDependencies": { "@types/node": "^20.0.0", "execa": "^9.3.0", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, @@ -634,29 +506,16 @@ "node": ">=20.0.0" } }, - "eng/tools/typespec-requirement/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "eng/tools/typespec-requirement/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, "eng/tools/typespec-validation": { "name": "@azure-tools/typespec-validation", "dev": true, "dependencies": { + "@azure-tools/specs-shared": "file:../../../.github/shared", + "debug": "^4.4.0", "globby": "^14.0.1", + "picocolors": "^1.1.1", "simple-git": "^3.24.0", + "strip-ansi": "^7.1.0", "suppressions": "file:../suppressions", "yaml": "^2.4.2" }, @@ -664,8 +523,10 @@ "tsv": "cmd/tsv.js" }, "devDependencies": { + "@types/debug": "^4.1.12", "@types/node": "^20.0.0", "@vitest/coverage-v8": "^3.0.7", + "prettier": "~3.5.3", "typescript": "~5.8.2", "vitest": "^3.0.7" }, @@ -673,23 +534,6 @@ "node": ">=20.0.0" } }, - "eng/tools/typespec-validation/node_modules/@types/node": { - "version": "20.17.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz", - "integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==", - "dev": true, - "license": "MIT", - "dependencies": 
{ - "undici-types": "~6.19.2" - } - }, - "eng/tools/typespec-validation/node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true, - "license": "MIT" - }, "node_modules/@ampproject/remapping": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", @@ -705,13 +549,12 @@ } }, "node_modules/@apidevtools/json-schema-ref-parser": { - "version": "11.7.2", - "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.2.tgz", - "integrity": "sha512-4gY54eEGEstClvEkGnwVkTkrx0sqwemEFG5OSRRn3tD91XH0+Q8XIkYIfo7IwEWPpJZwILb9GUXeShtplRc/eA==", + "version": "14.0.2", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-14.0.2.tgz", + "integrity": "sha512-5NSrCKPoD+zY/kDmYijF86ehSLD1S7/HX0TOU5WRYCX93JgkfC1hWDVOkvf+qgDVvrzVGZWYtS6lb5vhoio9RA==", "dev": true, "license": "MIT", "dependencies": { - "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", "js-yaml": "^4.1.0" }, @@ -758,6 +601,24 @@ "openapi-types": ">=7" } }, + "node_modules/@apidevtools/swagger-parser/node_modules/@apidevtools/json-schema-ref-parser": { + "version": "11.7.2", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.2.tgz", + "integrity": "sha512-4gY54eEGEstClvEkGnwVkTkrx0sqwemEFG5OSRRn3tD91XH0+Q8XIkYIfo7IwEWPpJZwILb9GUXeShtplRc/eA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.15", + "js-yaml": "^4.1.0" + }, + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/philsturgeon" + } + }, "node_modules/@apidevtools/swagger-parser/node_modules/ajv": { "version": "8.17.1", 
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", @@ -798,13 +659,13 @@ "license": "MIT" }, "node_modules/@autorest/codemodel": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@autorest/codemodel/-/codemodel-4.20.0.tgz", - "integrity": "sha512-Z2GwVwAGNTcfGUmrWT5LJqZv/WDXKBBpxhZrHu6zco/HrEGrqKQcKx5whlLX/GmAB/KmhcOWYr6aIyWomcoisQ==", + "version": "4.20.1", + "resolved": "https://registry.npmjs.org/@autorest/codemodel/-/codemodel-4.20.1.tgz", + "integrity": "sha512-MdI4G0EdQ8yOxGzgT1rCOXxXkCrUQLjVykOvdAyByIgHbnpRop1UzUQuuKmXO8gQPSy7xwYhnfVSgETbHIJZgg==", "dev": true, "license": "MIT", "dependencies": { - "@azure-tools/codegen": "~2.10.0", + "@azure-tools/codegen": "~2.10.1", "js-yaml": "~4.1.0" }, "engines": { @@ -812,9 +673,9 @@ } }, "node_modules/@autorest/core": { - "version": "3.10.4", - "resolved": "https://registry.npmjs.org/@autorest/core/-/core-3.10.4.tgz", - "integrity": "sha512-dcjNuVNknelZ4i2YVgYEVWEPTVJvTbcYka+orkcsb27Fvcyf6Ntno4dS0aiBhQqVMsNIInZOYGG7x/EIW2RGhg==", + "version": "3.10.8", + "resolved": "https://registry.npmjs.org/@autorest/core/-/core-3.10.8.tgz", + "integrity": "sha512-7tj+zPUYu42lrzOZUC2hNaH7Xt53IVaEbWzV23aEYzDhXF0zD9TTpVexFXKTT4idBV0njsAKEKjPMkmQuHLbgQ==", "dev": true, "license": "MIT", "bin": { @@ -826,13 +687,13 @@ } }, "node_modules/@autorest/extension-base": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/@autorest/extension-base/-/extension-base-3.6.0.tgz", - "integrity": "sha512-hE6nmdYu2SA6xlG46lM+/njtz0yNEkhzfkOs7PjrYulnXuBWHo08RdbXHGcecypgNhV2QAQcbV6ar5f1UGX6xQ==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/@autorest/extension-base/-/extension-base-3.6.1.tgz", + "integrity": "sha512-FWfO6LM3p+R1dW87wnGdJsCpZw67/h1hj09LsQuY0ywKBKv9lrLAW6AlVPrFyvUUIaCMMgd01U6TifCz/FRG9g==", "dev": true, "license": "MIT", "dependencies": { - "@azure-tools/codegen": "~2.10.0", + "@azure-tools/codegen": "~2.10.1", "js-yaml": "~4.1.0", "vscode-jsonrpc": "^3.5.0" }, @@ -841,44 
+702,27 @@ } }, "node_modules/@autorest/openapi-to-typespec": { - "version": "0.10.13", - "resolved": "https://registry.npmjs.org/@autorest/openapi-to-typespec/-/openapi-to-typespec-0.10.13.tgz", - "integrity": "sha512-JphHNCen15WyoWybxsxakLa43YpzI3M/TuM9YZU1ONPgVopCHqH4LaFwZJAy/tdEmhMvE8a7gScrxa5KqKXsBA==", + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/@autorest/openapi-to-typespec/-/openapi-to-typespec-0.11.2.tgz", + "integrity": "sha512-Mpi1+XL1ADVIs5Ug3M65GSAdDMxR9xO+IwHDhbKi7oz68fyUPgBeyJRXAKQm/F/kk9pzyb5ZFyKD+VSHDgkNfw==", "dev": true, "license": "MIT", "dependencies": { - "@autorest/codemodel": "~4.20.0", - "@autorest/extension-base": "~3.6.0", - "@azure-tools/codegen": "~2.10.0", - "@typespec/openapi": "^0.67.1", - "@typespec/openapi3": "^0.67.1", - "@typespec/prettier-plugin-typespec": "^0.67.1", + "@autorest/codemodel": "~4.20.1", + "@autorest/extension-base": "~3.6.1", + "@azure-tools/codegen": "~2.10.1", + "@azure-tools/openapi": "~3.6.1", + "@typespec/prettier-plugin-typespec": "^1.1.0", "change-case-all": "~2.1.0", "lodash": "~4.17.20", "pluralize": "^8.0.0", - "prettier": "~3.1.0" - } - }, - "node_modules/@autorest/openapi-to-typespec/node_modules/prettier": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.1.1.tgz", - "integrity": "sha512-22UbSzg8luF4UuZtzgiUOfcGM8s4tjBv6dJRT7j275NXsy2jb4aJa4NNveul5x4eqlF1wuhuR2RElK71RvmVaw==", - "dev": true, - "license": "MIT", - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" + "prettier": "~3.5.3" } }, "node_modules/@autorest/schemas": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@autorest/schemas/-/schemas-1.3.5.tgz", - "integrity": "sha512-HUP89Ns/4vDGcMtmFt/fxu+QqKvit/IQ8oBTQjzC6RnJojF+880KoEgTuweTuea2stzRmNyuMiBu4F8AnxdyUA==", + "version": "1.3.6", + "resolved": 
"https://registry.npmjs.org/@autorest/schemas/-/schemas-1.3.6.tgz", + "integrity": "sha512-kuditGLKhfEjHQxb1aCfs/j2hJL2y8eYEB94smxDd7Qp9beR+oYWwAM/y6PzgkAfk6OrRb3hZ+/NaZTXdoKU5A==", "dev": true, "license": "ISC" }, @@ -897,24 +741,66 @@ } }, "node_modules/@azure-tools/codegen": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@azure-tools/codegen/-/codegen-2.10.0.tgz", - "integrity": "sha512-gdy0at3BUZAAARgiX9Ye6SNCKhcjLs5FNUewa/KV/dMGcPv7mBvbslt5VO3W8wj0n96ifk970aIFaivjacBxeQ==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@azure-tools/codegen/-/codegen-2.10.1.tgz", + "integrity": "sha512-fZfREKjQnBTscjObgK4LuyZNFaofoCNQDNz0jl1i8fYNwCM5EOF9BXwtEtobuEyCpPUNDxQ/KKO65eWzirqk4w==", "dev": true, "license": "MIT", "dependencies": { "@azure-tools/async-io": "~3.0.0", "js-yaml": "~4.1.0", - "semver": "^7.3.5" + "semver": "^7.7.2" }, "engines": { "node": ">=12.0.0" } }, + "node_modules/@azure-tools/json": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@azure-tools/json/-/json-1.3.1.tgz", + "integrity": "sha512-0f4kQ6c513ycuk0Z29Nm09D/3dQHrHkduUW8wsFR1QTQ5uqgdYaDWg5I4cZbA8OkOIrJG73TzB/3G0liVCQ+Fw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure-tools/jsonschema": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@azure-tools/jsonschema/-/jsonschema-1.3.1.tgz", + "integrity": "sha512-P4KnJzZJjCATcn3nRcF5MPja2wrPdP48Us643+0eqGtNBL4O20CFVEm6WFeFeR8JhvNCsZfeayHiE6VOspe1rg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/@azure-tools/lint-diff": { "resolved": "eng/tools/lint-diff", "link": true }, + "node_modules/@azure-tools/oav-runner": { + "resolved": "eng/tools/oav-runner", + "link": true + }, + "node_modules/@azure-tools/openapi": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/@azure-tools/openapi/-/openapi-3.6.1.tgz", + "integrity": 
"sha512-vkIu0CUg09bzxqrlrNHdoOPu9AFhObp0FqG40M2WaF2dcVgLalsUc+wK5s4LpftlZAxcBmzVHna22JhI5/0X9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@azure-tools/json": "~1.3.1", + "@azure-tools/jsonschema": "~1.3.1" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure-tools/openapi-diff-runner": { + "resolved": "eng/tools/openapi-diff-runner", + "link": true + }, "node_modules/@azure-tools/openapi-tools-common": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/@azure-tools/openapi-tools-common/-/openapi-tools-common-1.2.2.tgz", @@ -979,44 +865,14 @@ } } }, - "node_modules/@azure-tools/rest-api-diff": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@azure-tools/rest-api-diff/-/rest-api-diff-0.2.0.tgz", - "integrity": "sha512-+ZoxSeCLqCvjbdAejEihcauiVWNLnELkcgXWAEyzxzWgE7hLaTowJi7bIA/8fEWLv2kjypRUpFm+PC8jUDpEWQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@azure-tools/typespec-autorest": ">=0.44.0, <1.0.0", - "@azure-tools/typespec-azure-core": ">=0.44.0, <1.0.0", - "acorn": "^8.12.0", - "acorn-walk": "^8.3.3", - "arg": "^4.1.3", - "create-require": "^1.1.1", - "deep-diff": "^1.0.2", - "diff": "^4.0.2", - "dotenv": "^16.4.5", - "make-error": "^1.3.6", - "undici-types": "^5.26.5", - "v8-compile-cache-lib": "^3.0.1", - "yaml": "^2.7.0", - "yargs": "^17.7.2", - "yn": "^3.1.1" - }, - "bin": { - "rest-api-diff": "cmd/rest-api-diff.js" - }, - "engines": { - "node": ">=18.0.0" - } - }, "node_modules/@azure-tools/sdk-suppressions": { "resolved": "eng/tools/sdk-suppressions", "link": true }, "node_modules/@azure-tools/spec-gen-sdk": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@azure-tools/spec-gen-sdk/-/spec-gen-sdk-0.3.2.tgz", - "integrity": "sha512-TE33uEOR6ntjbZjc6vnQzPcfgGlxWZ/nERJEPc6mSJL0bE7Qzt7ABYcsMihZW4Or7oT/N962KMFP4UZAWuVxSg==", + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@azure-tools/spec-gen-sdk/-/spec-gen-sdk-0.8.1.tgz", + "integrity": 
"sha512-Im994/0TxjYouIsu41YFafIgfqM9DsJRrScLK71yn7uXp8LN2rbmWZGcj4nW0DFu39COQoDr8RoYvEa+fThfZw==", "dev": true, "license": "MIT", "dependencies": { @@ -1056,8 +912,8 @@ "node": ">=10.13.0" } }, - "node_modules/@azure-tools/specs-model": { - "resolved": "eng/tools/specs-model", + "node_modules/@azure-tools/specs-shared": { + "resolved": ".github/shared", "link": true }, "node_modules/@azure-tools/suppressions": { @@ -1079,69 +935,69 @@ "link": true }, "node_modules/@azure-tools/typespec-apiview": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-apiview/-/typespec-apiview-0.6.0.tgz", - "integrity": "sha512-eV+i3xlsKrIJlqnw3gjrnbgUOHlJL2zEJAnMmJuu+iot4P73OCFiPpJO6yUhCvMod1nHkiFVDYh17ifITw2O6A==", + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-apiview/-/typespec-apiview-0.7.2.tgz", + "integrity": "sha512-4nF24c4agRkLdb9ldMU+zfY37MEvBNB2wm42j6AJAagk89/SLqU0YCNrDBH9IBPLb6UnXCEodIou3TBl6Fu7VA==", "dev": true, "license": "MIT", "engines": { "node": ">=16.0.0" }, "peerDependencies": { - "@typespec/compiler": ">=0.67 <1.0", + "@typespec/compiler": "^1.0.0", "@typespec/versioning": ">=0.67 <1.0" } }, "node_modules/@azure-tools/typespec-autorest": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-autorest/-/typespec-autorest-0.53.0.tgz", - "integrity": "sha512-9eAOTU/so8QOigMcy9YKA43jtMxccSP22wa7Is0ZiX59YTcaUDGlpI+6cFfmGH0tATGCOm5TvjyOkdrhNyKrPw==", + "version": "0.57.1", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-autorest/-/typespec-autorest-0.57.1.tgz", + "integrity": "sha512-AZ/SlkkxvRT/CJs6wOUbORwKYztU3D8+lR3hcj34vQlR/U3qSTCiCdL6xA4WH9LcYmP9aGjpopprGOEHQiU5SQ==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@azure-tools/typespec-azure-core": "^0.53.0", - "@azure-tools/typespec-azure-resource-manager": "^0.53.0", - "@azure-tools/typespec-client-generator-core": "^0.53.0", - 
"@typespec/compiler": "^0.67.0", - "@typespec/http": "^0.67.0", - "@typespec/openapi": "^0.67.0", - "@typespec/rest": "^0.67.0", - "@typespec/versioning": "^0.67.0" + "@azure-tools/typespec-azure-core": "^0.57.0", + "@azure-tools/typespec-azure-resource-manager": "^0.57.1", + "@azure-tools/typespec-client-generator-core": "^0.57.2", + "@typespec/compiler": "^1.1.0", + "@typespec/http": "^1.1.0", + "@typespec/openapi": "^1.1.0", + "@typespec/rest": "^0.71.0", + "@typespec/versioning": "^0.71.0" } }, "node_modules/@azure-tools/typespec-azure-core": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-core/-/typespec-azure-core-0.53.0.tgz", - "integrity": "sha512-zG+DV58ApChmkIIoTZ+XMIRsYLm6DnysMofg0o1UEuY50mS71sjzavcwceT8pXekPHtcXkLyYfdd7FyxirCuUA==", + "version": "0.57.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-core/-/typespec-azure-core-0.57.0.tgz", + "integrity": "sha512-O+F3axrJOJHjYGrQLRWoydHtWjWiXeAlaaILncS0I0xe6kinyFkpn7VIVKxH9ZZ+hPmkDAZybO53656R3PRfUA==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.0", - "@typespec/http": "^0.67.0", - "@typespec/rest": "^0.67.0" + "@typespec/compiler": "^1.1.0", + "@typespec/http": "^1.1.0", + "@typespec/rest": "^0.71.0" } }, "node_modules/@azure-tools/typespec-azure-portal-core": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-portal-core/-/typespec-azure-portal-core-0.53.0.tgz", - "integrity": "sha512-95P9/aTQOO7tsu+WI/nckegvcgSQkyHk0IzuTaiEi/enwxE+LKQPpLtzDrgJyGM/SoHQb7LM8bEa9Vb1I02XOA==", + "version": "0.57.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-portal-core/-/typespec-azure-portal-core-0.57.0.tgz", + "integrity": "sha512-15SWdzq0LkkBs2m43NZOLYXoUvafS7Cr0GrZ83u8rxRREreNU9Z+/pAw6+DtQhS6uxZN6aEbNK4k2xsjLv9aeQ==", "dev": true, "license": "MIT", "peerDependencies": { - 
"@azure-tools/typespec-azure-resource-manager": "^0.53.0", - "@typespec/compiler": "^0.67.0" + "@azure-tools/typespec-azure-resource-manager": "^0.57.0", + "@typespec/compiler": "^1.1.0" } }, "node_modules/@azure-tools/typespec-azure-resource-manager": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-resource-manager/-/typespec-azure-resource-manager-0.53.0.tgz", - "integrity": "sha512-sHeB+HqETYiHoRgcUjr61rxzCn+ITnYrg2gFQ0ExIK/B26hQv50t+VHe1YdrprlqzSvElJD+CtoqQQZffridNw==", + "version": "0.57.2", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-resource-manager/-/typespec-azure-resource-manager-0.57.2.tgz", + "integrity": "sha512-ljWdjsXpisst4AjnZsU/YMBcqcCGAVnloUaVf39aylFrvakdEQ/Esi/1Jrap05a9C7aXStzzZt3WZ8bPyQXmDw==", "dev": true, "license": "MIT", "dependencies": { @@ -1152,41 +1008,40 @@ "node": ">=20.0.0" }, "peerDependencies": { - "@azure-tools/typespec-azure-core": "^0.53.0", - "@typespec/compiler": "^0.67.0", - "@typespec/http": "^0.67.0", - "@typespec/openapi": "^0.67.0", - "@typespec/rest": "^0.67.0", - "@typespec/versioning": "^0.67.0" + "@azure-tools/typespec-azure-core": "^0.57.0", + "@typespec/compiler": "^1.1.0", + "@typespec/http": "^1.1.0", + "@typespec/openapi": "^1.1.0", + "@typespec/rest": "^0.71.0", + "@typespec/versioning": "^0.71.0" } }, "node_modules/@azure-tools/typespec-azure-rulesets": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-rulesets/-/typespec-azure-rulesets-0.53.0.tgz", - "integrity": "sha512-TsQeFKNQEG0juFzf0dQt8iikPSXGHNyW9hbDrUNrbnjnFvpxUZlL+1aLyI2hBmhHvJQJpLzHViVgKhXTLLBvIQ==", + "version": "0.57.1", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-rulesets/-/typespec-azure-rulesets-0.57.1.tgz", + "integrity": "sha512-+W+vPGiV4qpqwIeBb4k6sIvDidHxV4dlw4xW9rqoxR/dOTeIsHP6hOATpf8AMsWHcmOwvTn4ThDPhFgBCswvnw==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": 
{ - "@azure-tools/typespec-azure-core": "^0.53.0", - "@azure-tools/typespec-azure-resource-manager": "^0.53.0", - "@azure-tools/typespec-client-generator-core": "^0.53.0", - "@typespec/compiler": "^0.67.0" + "@azure-tools/typespec-azure-core": "^0.57.0", + "@azure-tools/typespec-azure-resource-manager": "^0.57.1", + "@azure-tools/typespec-client-generator-core": "^0.57.2", + "@typespec/compiler": "^1.1.0" } }, "node_modules/@azure-tools/typespec-client-generator-cli": { - "version": "0.16.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-cli/-/typespec-client-generator-cli-0.16.0.tgz", - "integrity": "sha512-swTTwpkba9mQPXM6921zgynfep6JlruEXQI1Qku7WixbSMsm2of+IIvsBt8SnlnwOHVnEwcqlN1d2wJZjVZzdw==", + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-cli/-/typespec-client-generator-cli-0.23.0.tgz", + "integrity": "sha512-UbrbUjA1XcwX0T7qBClz6BCFcS+nY4ddjNCt6HQFCBvQymns5ZmmslU0/QTo3gfNo2qubE91KW2ZnubwH8AURw==", "dev": true, "license": "MIT", "dependencies": { "@autorest/core": "^3.10.2", "@autorest/openapi-to-typespec": ">=0.10.6 <1.0.0", - "@azure-tools/rest-api-diff": ">=0.1.0 <1.0.0", - "@azure-tools/typespec-autorest": ">=0.44.0 <1.0.0", + "@azure-tools/typespec-autorest": ">=0.53.0 <1.0.0", "@azure/core-rest-pipeline": "^1.12.0", "@types/yargs": "^17.0.32", "autorest": "^3.7.1", @@ -1204,34 +1059,34 @@ "node": "^18.19.0 || >=20.6.0" }, "peerDependencies": { - "@typespec/compiler": ">=0.58.0 <1.0.0" + "@typespec/compiler": "1.0.0-rc.1 || >=1.0.0 <2.0.0" } }, "node_modules/@azure-tools/typespec-client-generator-core": { - "version": "0.53.1", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.53.1.tgz", - "integrity": "sha512-BWHQQ9Kjsk23Rb0eZ6V6HI2Gr20n/LhxAKEuBChCFWLjrFMYyXrHtlUBK6j/9D2VqwjaurRQA2SVXx/wzGyvAg==", + "version": "0.57.3", + "resolved": 
"https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.57.3.tgz", + "integrity": "sha512-c/OPeSpKH29jD2Abuli8z7ww5uorplOt9w3KbaQaMSx12u6gWi5vtYhQaFKk9AGiFGDyLPeA0+qo+UQ6t3pRBg==", "dev": true, "license": "MIT", "dependencies": { "change-case": "~5.4.4", "pluralize": "^8.0.0", - "yaml": "~2.7.0" + "yaml": "~2.8.0" }, "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@azure-tools/typespec-azure-core": "^0.53.0", - "@typespec/compiler": "^0.67.0", - "@typespec/events": "^0.67.0", - "@typespec/http": "^0.67.0", - "@typespec/openapi": "^0.67.0", - "@typespec/rest": "^0.67.0", - "@typespec/sse": "^0.67.0", - "@typespec/streams": "^0.67.0", - "@typespec/versioning": "^0.67.0", - "@typespec/xml": "^0.67.0" + "@azure-tools/typespec-azure-core": "^0.57.0", + "@typespec/compiler": "^1.1.0", + "@typespec/events": "^0.71.0", + "@typespec/http": "^1.1.0", + "@typespec/openapi": "^1.1.0", + "@typespec/rest": "^0.71.0", + "@typespec/sse": "^0.71.0", + "@typespec/streams": "^0.71.0", + "@typespec/versioning": "^0.71.0", + "@typespec/xml": "^0.71.0" } }, "node_modules/@azure-tools/typespec-liftr-base": { @@ -1240,6 +1095,10 @@ "integrity": "sha512-xftTTtVjDuxIzugQ9nL/abmttdDM3HAf5HhqKzs9DO0Kl0ZhXQlB2DYlT1hBs/N+IWerMF9k2eKs2RncngA03g==", "dev": true }, + "node_modules/@azure-tools/typespec-migration-validation": { + "resolved": "eng/tools/typespec-migration-validation", + "link": true + }, "node_modules/@azure-tools/typespec-requirement": { "resolved": "eng/tools/typespec-requirement", "link": true @@ -1278,9 +1137,9 @@ } }, "node_modules/@azure/avocado": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/@azure/avocado/-/avocado-0.9.1.tgz", - "integrity": "sha512-cnVDCL0uPnJTGp3wrhv0k7lXCPABbfcXT36Hf3jwSSXuWNQlQEvgZ/wR2kcZFsMnmMCaHWn5o7aTU3lOPFQ7Mg==", + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/@azure/avocado/-/avocado-0.9.2.tgz", + "integrity": 
"sha512-dPbvYi1KpviObTaMNrQuaDPuzdPhe0r9QP9vRQ5kfqozbOpHcDjhWh652h9BaP80Nsb6G+cxKYHyj0XEqrSjEA==", "dev": true, "license": "MIT", "dependencies": { @@ -1341,6 +1200,20 @@ "wrap-ansi": "^6.2.0" } }, + "node_modules/@azure/avocado/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@azure/avocado/node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", @@ -1355,29 +1228,56 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/@azure/avocado/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "node_modules/@azure/avocado/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.1" + "p-locate": "^4.1.0" }, "engines": { "node": ">=8" } }, - "node_modules/@azure/avocado/node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "node_modules/@azure/avocado/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": 
"sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, "license": "MIT", "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@azure/avocado/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@azure/avocado/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" }, "engines": { "node": ">=8" @@ -1518,9 +1418,9 @@ } }, "node_modules/@azure/core-rest-pipeline": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.19.1.tgz", - "integrity": "sha512-zHeoI3NCs53lLBbWNzQycjnYKsA1CVKlnzSNuSFcUDwBp8HHVObePxrM7HaX+Ha5Ks639H7chNC9HOaIhNS03w==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.21.0.tgz", + "integrity": "sha512-a4MBwe/5WKbq9MIxikzgxLBbruC5qlkFYlBdI7Ev50Y7ib5Vo/Jvt5jnJo7NaWeJ908LCHL0S1Us4UMf1VoTfg==", "dev": true, "license": "MIT", "dependencies": { @@ -1529,8 +1429,7 @@ "@azure/core-tracing": "^1.0.1", "@azure/core-util": "^1.11.0", "@azure/logger": "^1.0.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.0", + "@typespec/ts-http-runtime": "^0.2.3", "tslib": "^2.6.2" }, "engines": 
{ @@ -1551,13 +1450,14 @@ } }, "node_modules/@azure/core-util": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.11.0.tgz", - "integrity": "sha512-DxOSLua+NdpWoSqULhjDyAZTXFdP/LKkqtYuxxz1SCN289zk3OG8UOpnCQAz/tygyACBtWp/BoO72ptK7msY8g==", + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.12.0.tgz", + "integrity": "sha512-13IyjTQgABPARvG90+N2dXpC+hwp466XCdQXPCRlbWHgd3SJd5Q1VvaBGv6k1BIa4MQm6hAF1UBU1m8QUxV8sQ==", "dev": true, "license": "MIT", "dependencies": { "@azure/abort-controller": "^2.0.0", + "@typespec/ts-http-runtime": "^0.2.2", "tslib": "^2.6.2" }, "engines": { @@ -1565,12 +1465,13 @@ } }, "node_modules/@azure/logger": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.4.tgz", - "integrity": "sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.2.0.tgz", + "integrity": "sha512-0hKEzLhpw+ZTAfNJyRrn6s+V0nDWzXk9OjBr2TiGIu0OfMr5s2V4FpKLTAK3Ca5r5OKLbf4hkOGDPyiRjie/jA==", "dev": true, "license": "MIT", "dependencies": { + "@typespec/ts-http-runtime": "^0.2.2", "tslib": "^2.6.2" }, "engines": { @@ -1639,6 +1540,273 @@ "dev": true, "license": "0BSD" }, + "node_modules/@azure/oad": { + "version": "0.10.14", + "resolved": "https://registry.npmjs.org/@azure/oad/-/oad-0.10.14.tgz", + "integrity": "sha512-lfiIsacGPoRFdossLRPptHw2OW7XBcnLCpZt3bTb/BWTR+3K1/M4t+BUOXB6aKA+iQQjurMkw4iksUWzSUVLnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ts-common/fs": "^0.2.0", + "@ts-common/iterator": "^0.3.6", + "@ts-common/json": "^0.3.1", + "@ts-common/json-parser": "^0.9.0", + "@ts-common/source-map": "^0.5.0", + "@ts-common/string-map": "^0.3.0", + "acorn": "^5.7.4", + "autorest": "^3.6.1", + "glob": "^7.1.3", + "js-yaml": "^3.13.1", + "json-pointer": "^0.6.2", + "json-refs": "^3.0.15", + "kind-of": 
"^6.0.3", + "lodash": "^4.17.21", + "minimist": "^1.2.8", + "request": "^2.88.0", + "set-value": "^4.1.0", + "shell-quote": "^1.8.3", + "source-map": "^0.7.4", + "tslib": "^2.6.3", + "winston": "^3.13.0", + "yargs": "^13.2.2", + "yargs-parser": "^13.1.2" + }, + "bin": { + "oad": "dist/cli.js" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/oad/node_modules/@ts-common/iterator": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/@ts-common/iterator/-/iterator-0.3.6.tgz", + "integrity": "sha512-nNdcleTj3qLlchH17HI/xqOc6sNgOqJ5DdRR0nOEVdJVZCo5bfqoQTu6+Q9ZwMhuETuR2d86MSlmaL2FVHnPjQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@azure/oad/node_modules/acorn": { + "version": "5.7.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", + "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/@azure/oad/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@azure/oad/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@azure/oad/node_modules/cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": 
"sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, + "node_modules/@azure/oad/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@azure/oad/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/oad/node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@azure/oad/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@azure/oad/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": 
"sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/oad/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@azure/oad/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/oad/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@azure/oad/node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@azure/oad/node_modules/string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": 
"sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/oad/node_modules/wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/oad/node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/@azure/oad/node_modules/yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + } + }, + "node_modules/@azure/oad/node_modules/yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": 
"^1.2.0" + } + }, "node_modules/@azure/openapi-markdown": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/@azure/openapi-markdown/-/openapi-markdown-0.9.4.tgz", @@ -1727,24 +1895,24 @@ "license": "MIT" }, "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", + "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, "license": "MIT", "engines": { @@ -1752,9 +1920,9 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "dev": true, "license": "MIT", "engines": { @@ -1762,13 +1930,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", - "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.7.tgz", + "integrity": "sha512-qnzXzDXdr/po3bOTbTIQZ7+TxNKxpkN5IifVLXS+r7qwynkZfPyjZfE7hCXbo7IoO9TNcSyibgONsf2HauUd3Q==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.27.0" + "@babel/types": "^7.27.7" }, "bin": { "parser": "bin/babel-parser.js" @@ -1778,14 +1946,14 @@ } }, "node_modules/@babel/types": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", - "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.7.tgz", + "integrity": "sha512-8OLQgDScAOHXnAz2cV+RfzzNMipuLVBz2biuAJFMV9bfkNf393je3VM8CLkjQodW5+iWsSJdSgSWT6rsZoXHPw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -1825,9 +1993,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", - "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", + "version": "0.25.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz", + "integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==", "cpu": [ "ppc64" ], @@ -1842,9 +2010,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", - "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.5.tgz", + "integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==", "cpu": [ "arm" ], @@ -1859,9 +2027,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", - "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz", + "integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==", "cpu": [ "arm64" ], @@ -1876,9 +2044,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", - "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.5.tgz", + "integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==", "cpu": [ "x64" ], @@ -1893,9 +2061,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", - "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz", + "integrity": "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==", "cpu": [ "arm64" ], @@ -1910,9 +2078,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", - "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz", + "integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==", "cpu": [ "x64" ], @@ -1927,9 +2095,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", - "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz", + "integrity": "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==", "cpu": [ "arm64" ], @@ -1944,9 +2112,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", - "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz", + "integrity": 
"sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==", "cpu": [ "x64" ], @@ -1961,9 +2129,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", - "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz", + "integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==", "cpu": [ "arm" ], @@ -1978,9 +2146,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", - "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz", + "integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==", "cpu": [ "arm64" ], @@ -1995,9 +2163,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", - "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz", + "integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==", "cpu": [ "ia32" ], @@ -2012,9 +2180,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", - "integrity": 
"sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz", + "integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==", "cpu": [ "loong64" ], @@ -2029,9 +2197,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", - "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz", + "integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==", "cpu": [ "mips64el" ], @@ -2046,9 +2214,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", - "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz", + "integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==", "cpu": [ "ppc64" ], @@ -2063,9 +2231,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", - "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz", + "integrity": "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==", "cpu": [ "riscv64" 
], @@ -2080,9 +2248,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", - "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz", + "integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==", "cpu": [ "s390x" ], @@ -2097,9 +2265,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", - "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz", + "integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==", "cpu": [ "x64" ], @@ -2114,9 +2282,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", - "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz", + "integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==", "cpu": [ "arm64" ], @@ -2131,9 +2299,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", - "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", + "version": "0.25.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz", + "integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==", "cpu": [ "x64" ], @@ -2148,9 +2316,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", - "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz", + "integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==", "cpu": [ "arm64" ], @@ -2165,9 +2333,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", - "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz", + "integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==", "cpu": [ "x64" ], @@ -2182,9 +2350,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", - "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz", + "integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==", "cpu": [ "x64" ], @@ -2199,9 +2367,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", - "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz", + "integrity": "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==", "cpu": [ "arm64" ], @@ -2216,9 +2384,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", - "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz", + "integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==", "cpu": [ "ia32" ], @@ -2233,9 +2401,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", - "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", + "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", "cpu": [ "x64" ], @@ -2250,9 +2418,9 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.1.tgz", - "integrity": "sha512-soEIOALTfTK6EjmKMMoLugwaP0rzkad90iIWd1hMO9ARkSAyjfMfkRRhLvD5qH7vvM0Cg72pieUfR6yh6XxC4w==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": 
"sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", "dev": true, "license": "MIT", "dependencies": { @@ -2292,9 +2460,9 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.2", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz", - "integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==", + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.20.1.tgz", + "integrity": "sha512-OL0RJzC/CBzli0DrrR31qzj6d6i6Mm3HByuhflhl4LOBiWxN+3i6/t/ZQQNii4tjksXi8r2CRW1wMpWA2ULUEw==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -2306,34 +2474,10 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@eslint/config-array/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/@eslint/config-helpers": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.0.tgz", - "integrity": "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==", + "version": "0.2.3", + "resolved": 
"https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.3.tgz", + "integrity": "sha512-u180qk2Um1le4yf0ruXH3PYFeEZeYC3p/4wCTKrr2U1CmGdzGi3KtY0nuPDH48UJxlKCC5RDzbcbh4X0XlqgHg==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2341,9 +2485,9 @@ } }, "node_modules/@eslint/core": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", - "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz", + "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -2377,48 +2521,30 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@eslint/eslintrc/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", "dev": true, "license": "MIT", "engines": { - "node": ">= 4" - } - }, - "node_modules/@eslint/eslintrc/node_modules/minimatch": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" + "node": ">=18" }, - "engines": { - "node": "*" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@eslint/js": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.23.0.tgz", - "integrity": "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==", + "version": "9.29.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.29.0.tgz", + "integrity": "sha512-3PIF4cBw/y+1u2EazflInpV+lYsSG0aByVIQzAgb1m1MhHFSbqTyNqtBKHgWf/9Ykud+DhILS9EGkmekVhbKoQ==", "dev": true, "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, "node_modules/@eslint/object-schema": { @@ -2432,19 +2558,41 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz", - "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", + "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.12.0", + "@eslint/core": "^0.15.1", "levn": "^0.4.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": 
"sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@ewoudenberg/difflib": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@ewoudenberg/difflib/-/difflib-0.1.0.tgz", + "integrity": "sha512-OU5P5mJyD3OoWYMWY+yIgwvgNS9cFAU10f+DDuvtogcWQOoJIsQ4Hy2McSfUfhKjq8L0FuWVb4Rt7kgA+XK86A==", + "dev": true, + "dependencies": { + "heap": ">= 0.2.0" + } + }, "node_modules/@faker-js/faker": { "version": "5.5.3", "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-5.5.3.tgz", @@ -2506,9 +2654,9 @@ } }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.2.tgz", - "integrity": "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2519,175 +2667,506 @@ "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@inquirer/figures": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.11.tgz", - "integrity": "sha512-eOg92lvrn/aRUqbxRyvpEWnrvRuTYRifixHkYVpJiygTgVSBIHDqLh0SrMQXkafvULg3ck11V7xvR+zcgvpHFw==", + "node_modules/@inquirer/checkbox": { + "version": "4.1.8", + "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.1.8.tgz", + "integrity": "sha512-d/QAsnwuHX2OPolxvYcgSj7A9DO9H6gVOy2DvBTx+P2LH2iRTo/RSGV3iwCzW024nP9hw98KIuDmdyhZQj1UQg==", "dev": true, "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.13", + "@inquirer/figures": 
"^1.0.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2", + "yoctocolors-cjs": "^2.1.2" + }, "engines": { "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "node_modules/@inquirer/confirm": { + "version": "5.1.12", + "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.12.tgz", + "integrity": "sha512-dpq+ielV9/bqgXRUbNH//KsY6WEw9DrGPmipkpmgC1Y46cwuBTNx7PXFWTjc3MQ+urcc0QxoVHcMI0FW4Ok0hg==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7" }, "engines": { - "node": ">=12" + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "node_modules/@inquirer/core": { + "version": "10.1.13", + "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.1.13.tgz", + "integrity": "sha512-1viSxebkYN2nJULlzCxES6G9/stgHSepZ9LqqfdIGPHj5OHhiBUXVS0a6R0bEC2A+VL4D9w6QB66ebCr6HGllA==", "dev": true, "license": "MIT", + "dependencies": { + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2", + "cli-width": "^4.1.0", + 
"mute-stream": "^2.0.0", + "signal-exit": "^4.1.0", + "wrap-ansi": "^6.2.0", + "yoctocolors-cjs": "^2.1.2" + }, "engines": { - "node": ">=12" + "node": ">=18" }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@isaacs/cliui/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "node_modules/@inquirer/editor": { + "version": "4.2.13", + "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.13.tgz", + "integrity": "sha512-WbicD9SUQt/K8O5Vyk9iC2ojq5RHoCLK6itpp2fHsWe44VxxcA9z3GTWlvjSTGmMQpZr+lbVmrxdHcumJoLbMA==", "dev": true, "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7", + "external-editor": "^3.1.0" + }, "engines": { - "node": ">=12" + "node": ">=18" }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "node_modules/@inquirer/expand": { + "version": "4.0.15", + "resolved": 
"https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.15.tgz", + "integrity": "sha512-4Y+pbr/U9Qcvf+N/goHzPEXiHH8680lM3Dr3Y9h9FFw4gHS+zVpbj8LfbKWIb/jayIB4aSO4pWiBTrBYWkvi5A==", "dev": true, "license": "MIT", "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7", + "yoctocolors-cjs": "^2.1.2" }, "engines": { - "node": ">=12" + "node": ">=18" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "node_modules/@inquirer/figures": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.12.tgz", + "integrity": "sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ==", "dev": true, "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" + "node": ">=18" } }, - "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "node_modules/@inquirer/input": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.1.12.tgz", + "integrity": "sha512-xJ6PFZpDjC+tC1P8ImGprgcsrzQRsUh9aH3IZixm1lAZFK49UGHxM3ltFfuInN2kPYNfyoPRh+tU4ftsjPLKqQ==", "dev": true, "license": "MIT", "dependencies": { - 
"ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7" }, "engines": { - "node": ">=12" + "node": ">=18" }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@isaacs/fs-minipass": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", - "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "node_modules/@inquirer/number": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.15.tgz", + "integrity": "sha512-xWg+iYfqdhRiM55MvqiTCleHzszpoigUpN5+t1OMcRkJrUrw7va3AzXaxvS+Ak7Gny0j2mFSTv2JJj8sMtbV2g==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "minipass": "^7.0.4" + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7" }, "engines": { - "node": ">=18.0.0" + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "node_modules/@inquirer/password": { + "version": "4.0.15", + "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.15.tgz", + "integrity": "sha512-75CT2p43DGEnfGTaqFpbDC2p2EEMrq0S+IRrf9iJvYreMy5mAWj087+mdKyLHapUEPLjN10mNvABpGbk8Wdraw==", "dev": true, "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2" + }, "engines": { - "node": ">=8" + "node": ">=18" + }, + 
"peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "node_modules/@inquirer/prompts": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.5.3.tgz", + "integrity": "sha512-8YL0WiV7J86hVAxrh3fE5mDCzcTDe1670unmJRz6ArDgN+DBK1a0+rbnNWp4DUB5rPMwqD5ZP6YHl9KK1mbZRg==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.24" + "@inquirer/checkbox": "^4.1.8", + "@inquirer/confirm": "^5.1.12", + "@inquirer/editor": "^4.2.13", + "@inquirer/expand": "^4.0.15", + "@inquirer/input": "^4.1.12", + "@inquirer/number": "^3.0.15", + "@inquirer/password": "^4.0.15", + "@inquirer/rawlist": "^4.1.3", + "@inquirer/search": "^3.0.15", + "@inquirer/select": "^4.2.3" }, "engines": { - "node": ">=6.0.0" + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "node_modules/@inquirer/rawlist": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.3.tgz", + "integrity": "sha512-7XrV//6kwYumNDSsvJIPeAqa8+p7GJh7H5kRuxirct2cgOcSWwwNGoXDRgpNFbY/MG2vQ4ccIWCi8+IXXyFMZA==", "dev": true, "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.13", + "@inquirer/type": "^3.0.7", + 
"yoctocolors-cjs": "^2.1.2" + }, "engines": { - "node": ">=6.0.0" + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } } }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "node_modules/@inquirer/search": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.0.15.tgz", + "integrity": "sha512-YBMwPxYBrADqyvP4nNItpwkBnGGglAvCLVW8u4pRmmvOsHUtCAUIMbUrLX5B3tFL1/WsLGdQ2HNzkqswMs5Uaw==", "dev": true, "license": "MIT", - "engines": { - "node": ">=6.0.0" + "dependencies": { + "@inquirer/core": "^10.1.13", + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/select": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.2.3.tgz", + "integrity": "sha512-OAGhXU0Cvh0PhLz9xTF/kx6g6x+sP+PcyTiLvCrewI99P3BBeexD+VbuwkNDvqGkk3y2h5ZiWLeRP7BFlhkUDg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.1.13", + "@inquirer/figures": "^1.0.12", + "@inquirer/type": "^3.0.7", + "ansi-escapes": "^4.3.2", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/type": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.7.tgz", + "integrity": 
"sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { 
+ "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { @@ -2795,9 +3274,9 @@ } }, "node_modules/@jsonjoy.com/util": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/util/-/util-1.5.0.tgz", - "integrity": "sha512-ojoNsrIuPI9g6o8UxhraZQSyF2ByJanAY4cTFbc8Mf2AXEF4aQRGY1dJxyJpuyav8r9FGflEt/Ff3u5Nt6YMPA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@jsonjoy.com/util/-/util-1.6.0.tgz", + "integrity": "sha512-sw/RMbehRhN68WRtcKCpQOPfnH6lLP4GJfqzi3iYej8tnzpZUDr6UkZYJjcjjC0FWEJOJbyM3PTIwxucUmDG2A==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2871,20 +3350,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@microsoft.azure/openapi-validator-core/node_modules/jsonpath-plus": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-8.1.0.tgz", - "integrity": "sha512-qVTiuKztFGw0dGhYi3WNqvddx3/SHtyDT0xJaeyz4uP0d1tkpG+0y5uYQ4OcIo1TLAz3PE/qDOW9F0uDt3+CTw==", - "dev": true, - "license": "MIT", - "bin": { - "jsonpath": "bin/jsonpath-cli.js", - "jsonpath-plus": "bin/jsonpath-cli.js" - }, - "engines": { - "node": ">=14.0.0" - } - }, "node_modules/@microsoft.azure/openapi-validator-rulesets": { "version": "2.1.7", "resolved": "https://registry.npmjs.org/@microsoft.azure/openapi-validator-rulesets/-/openapi-validator-rulesets-2.1.7.tgz", @@ -2921,18 +3386,17 @@ "js-yaml": "^4.1.0" } }, - "node_modules/@microsoft.azure/openapi-validator-rulesets/node_modules/jsonpath-plus": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-8.1.0.tgz", - "integrity": 
"sha512-qVTiuKztFGw0dGhYi3WNqvddx3/SHtyDT0xJaeyz4uP0d1tkpG+0y5uYQ4OcIo1TLAz3PE/qDOW9F0uDt3+CTw==", + "node_modules/@noble/hashes": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", "dev": true, "license": "MIT", - "bin": { - "jsonpath": "bin/jsonpath-cli.js", - "jsonpath-plus": "bin/jsonpath-cli.js" - }, "engines": { - "node": ">=14.0.0" + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" } }, "node_modules/@nodelib/fs.scandir": { @@ -2983,6 +3447,16 @@ "node": ">=8.0.0" } }, + "node_modules/@paralleldrive/cuid2": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz", + "integrity": "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@noble/hashes": "^1.1.5" + } + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", @@ -3049,9 +3523,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz", - "integrity": "sha512-l7StVw6WAa8l3vA1ov80jyetOAEo1FtHvZDbzXDO/02Sq/QVvqlHkYoFwDJPIMj0GKiistsBudfx5tGFnwYWDQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.44.1.tgz", + "integrity": "sha512-JAcBr1+fgqx20m7Fwe1DxPUl/hPkee6jA6Pl7n1v2EFiktAHenTaXl5aIFjUIEsfn9w3HE4gK1lEgNGMzBDs1w==", "cpu": [ "arm" ], @@ -3063,9 +3537,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.37.0.tgz", - "integrity": 
"sha512-6U3SlVyMxezt8Y+/iEBcbp945uZjJwjZimu76xoG7tO1av9VO691z8PkhzQ85ith2I8R2RddEPeSfcbyPfD4hA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.44.1.tgz", + "integrity": "sha512-RurZetXqTu4p+G0ChbnkwBuAtwAbIwJkycw1n6GvlGlBuS4u5qlr5opix8cBAYFJgaY05TWtM+LaoFggUmbZEQ==", "cpu": [ "arm64" ], @@ -3077,9 +3551,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.37.0.tgz", - "integrity": "sha512-+iTQ5YHuGmPt10NTzEyMPbayiNTcOZDWsbxZYR1ZnmLnZxG17ivrPSWFO9j6GalY0+gV3Jtwrrs12DBscxnlYA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.44.1.tgz", + "integrity": "sha512-fM/xPesi7g2M7chk37LOnmnSTHLG/v2ggWqKj3CCA1rMA4mm5KVBT1fNoswbo1JhPuNNZrVwpTvlCVggv8A2zg==", "cpu": [ "arm64" ], @@ -3091,9 +3565,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.37.0.tgz", - "integrity": "sha512-m8W2UbxLDcmRKVjgl5J/k4B8d7qX2EcJve3Sut7YGrQoPtCIQGPH5AMzuFvYRWZi0FVS0zEY4c8uttPfX6bwYQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.44.1.tgz", + "integrity": "sha512-gDnWk57urJrkrHQ2WVx9TSVTH7lSlU7E3AFqiko+bgjlh78aJ88/3nycMax52VIVjIm3ObXnDL2H00e/xzoipw==", "cpu": [ "x64" ], @@ -3105,9 +3579,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.37.0.tgz", - "integrity": "sha512-FOMXGmH15OmtQWEt174v9P1JqqhlgYge/bUjIbiVD1nI1NeJ30HYT9SJlZMqdo1uQFyt9cz748F1BHghWaDnVA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.44.1.tgz", + "integrity": 
"sha512-wnFQmJ/zPThM5zEGcnDcCJeYJgtSLjh1d//WuHzhf6zT3Md1BvvhJnWoy+HECKu2bMxaIcfWiu3bJgx6z4g2XA==", "cpu": [ "arm64" ], @@ -3119,9 +3593,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.37.0.tgz", - "integrity": "sha512-SZMxNttjPKvV14Hjck5t70xS3l63sbVwl98g3FlVVx2YIDmfUIy29jQrsw06ewEYQ8lQSuY9mpAPlmgRD2iSsA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.44.1.tgz", + "integrity": "sha512-uBmIxoJ4493YATvU2c0upGz87f99e3wop7TJgOA/bXMFd2SvKCI7xkxY/5k50bv7J6dw1SXT4MQBQSLn8Bb/Uw==", "cpu": [ "x64" ], @@ -3133,9 +3607,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.37.0.tgz", - "integrity": "sha512-hhAALKJPidCwZcj+g+iN+38SIOkhK2a9bqtJR+EtyxrKKSt1ynCBeqrQy31z0oWU6thRZzdx53hVgEbRkuI19w==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.44.1.tgz", + "integrity": "sha512-n0edDmSHlXFhrlmTK7XBuwKlG5MbS7yleS1cQ9nn4kIeW+dJH+ExqNgQ0RrFRew8Y+0V/x6C5IjsHrJmiHtkxQ==", "cpu": [ "arm" ], @@ -3147,9 +3621,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.37.0.tgz", - "integrity": "sha512-jUb/kmn/Gd8epbHKEqkRAxq5c2EwRt0DqhSGWjPFxLeFvldFdHQs/n8lQ9x85oAeVb6bHcS8irhTJX2FCOd8Ag==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.44.1.tgz", + "integrity": "sha512-8WVUPy3FtAsKSpyk21kV52HCxB+me6YkbkFHATzC2Yd3yuqHwy2lbFL4alJOLXKljoRw08Zk8/xEj89cLQ/4Nw==", "cpu": [ "arm" ], @@ -3161,9 +3635,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - 
"version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.37.0.tgz", - "integrity": "sha512-oNrJxcQT9IcbcmKlkF+Yz2tmOxZgG9D9GRq+1OE6XCQwCVwxixYAa38Z8qqPzQvzt1FCfmrHX03E0pWoXm1DqA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.44.1.tgz", + "integrity": "sha512-yuktAOaeOgorWDeFJggjuCkMGeITfqvPgkIXhDqsfKX8J3jGyxdDZgBV/2kj/2DyPaLiX6bPdjJDTu9RB8lUPQ==", "cpu": [ "arm64" ], @@ -3175,9 +3649,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.37.0.tgz", - "integrity": "sha512-pfxLBMls+28Ey2enpX3JvjEjaJMBX5XlPCZNGxj4kdJyHduPBXtxYeb8alo0a7bqOoWZW2uKynhHxF/MWoHaGQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.44.1.tgz", + "integrity": "sha512-W+GBM4ifET1Plw8pdVaecwUgxmiH23CfAUj32u8knq0JPFyK4weRy6H7ooxYFD19YxBulL0Ktsflg5XS7+7u9g==", "cpu": [ "arm64" ], @@ -3189,9 +3663,9 @@ ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.37.0.tgz", - "integrity": "sha512-yCE0NnutTC/7IGUq/PUHmoeZbIwq3KRh02e9SfFh7Vmc1Z7atuJRYWhRME5fKgT8aS20mwi1RyChA23qSyRGpA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.44.1.tgz", + "integrity": "sha512-1zqnUEMWp9WrGVuVak6jWTl4fEtrVKfZY7CvcBmUUpxAJ7WcSowPSAWIKa/0o5mBL/Ij50SIf9tuirGx63Ovew==", "cpu": [ "loong64" ], @@ -3203,9 +3677,9 @@ ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.37.0.tgz", - "integrity": 
"sha512-NxcICptHk06E2Lh3a4Pu+2PEdZ6ahNHuK7o6Np9zcWkrBMuv21j10SQDJW3C9Yf/A/P7cutWoC/DptNLVsZ0VQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.44.1.tgz", + "integrity": "sha512-Rl3JKaRu0LHIx7ExBAAnf0JcOQetQffaw34T8vLlg9b1IhzcBgaIdnvEbbsZq9uZp3uAH+JkHd20Nwn0h9zPjA==", "cpu": [ "ppc64" ], @@ -3217,9 +3691,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.37.0.tgz", - "integrity": "sha512-PpWwHMPCVpFZLTfLq7EWJWvrmEuLdGn1GMYcm5MV7PaRgwCEYJAwiN94uBuZev0/J/hFIIJCsYw4nLmXA9J7Pw==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.44.1.tgz", + "integrity": "sha512-j5akelU3snyL6K3N/iX7otLBIl347fGwmd95U5gS/7z6T4ftK288jKq3A5lcFKcx7wwzb5rgNvAg3ZbV4BqUSw==", "cpu": [ "riscv64" ], @@ -3231,9 +3705,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.37.0.tgz", - "integrity": "sha512-DTNwl6a3CfhGTAOYZ4KtYbdS8b+275LSLqJVJIrPa5/JuIufWWZ/QFvkxp52gpmguN95eujrM68ZG+zVxa8zHA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.44.1.tgz", + "integrity": "sha512-ppn5llVGgrZw7yxbIm8TTvtj1EoPgYUAbfw0uDjIOzzoqlZlZrLJ/KuiE7uf5EpTpCTrNt1EdtzF0naMm0wGYg==", "cpu": [ "riscv64" ], @@ -3245,9 +3719,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.37.0.tgz", - "integrity": "sha512-hZDDU5fgWvDdHFuExN1gBOhCuzo/8TMpidfOR+1cPZJflcEzXdCy1LjnklQdW8/Et9sryOPJAKAQRw8Jq7Tg+A==", + "version": "4.44.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.44.1.tgz", + "integrity": "sha512-Hu6hEdix0oxtUma99jSP7xbvjkUM/ycke/AQQ4EC5g7jNRLLIwjcNwaUy95ZKBJJwg1ZowsclNnjYqzN4zwkAw==", "cpu": [ "s390x" ], @@ -3259,9 +3733,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.37.0.tgz", - "integrity": "sha512-pKivGpgJM5g8dwj0ywBwe/HeVAUSuVVJhUTa/URXjxvoyTT/AxsLTAbkHkDHG7qQxLoW2s3apEIl26uUe08LVQ==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.44.1.tgz", + "integrity": "sha512-EtnsrmZGomz9WxK1bR5079zee3+7a+AdFlghyd6VbAjgRJDbTANJ9dcPIPAi76uG05micpEL+gPGmAKYTschQw==", "cpu": [ "x64" ], @@ -3273,9 +3747,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.37.0.tgz", - "integrity": "sha512-E2lPrLKE8sQbY/2bEkVTGDEk4/49UYRVWgj90MY8yPjpnGBQ+Xi1Qnr7b7UIWw1NOggdFQFOLZ8+5CzCiz143w==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.44.1.tgz", + "integrity": "sha512-iAS4p+J1az6Usn0f8xhgL4PaU878KEtutP4hqw52I4IO6AGoyOkHCxcc4bqufv1tQLdDWFx8lR9YlwxKuv3/3g==", "cpu": [ "x64" ], @@ -3287,9 +3761,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.37.0.tgz", - "integrity": "sha512-Jm7biMazjNzTU4PrQtr7VS8ibeys9Pn29/1bm4ph7CP2kf21950LgN+BaE2mJ1QujnvOc6p54eWWiVvn05SOBg==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.44.1.tgz", + "integrity": "sha512-NtSJVKcXwcqozOl+FwI41OH3OApDyLk3kqTJgx8+gp6On9ZEt5mYhIsKNPGuaZr3p9T6NWPKGU/03Vw4CNU9qg==", "cpu": [ "arm64" ], @@ 
-3301,9 +3775,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.37.0.tgz", - "integrity": "sha512-e3/1SFm1OjefWICB2Ucstg2dxYDkDTZGDYgwufcbsxTHyqQps1UQf33dFEChBNmeSsTOyrjw2JJq0zbG5GF6RA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.44.1.tgz", + "integrity": "sha512-JYA3qvCOLXSsnTR3oiyGws1Dm0YTuxAAeaYGVlGpUsHqloPcFjPg+X0Fj2qODGLNwQOAcCiQmHub/V007kiH5A==", "cpu": [ "ia32" ], @@ -3315,9 +3789,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.37.0.tgz", - "integrity": "sha512-LWbXUBwn/bcLx2sSsqy7pK5o+Nr+VCoRoAohfJ5C/aBio9nfJmGQqHAhU6pwxV/RmyTk5AqdySma7uwWGlmeuA==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.44.1.tgz", + "integrity": "sha512-J8o22LuF0kTe7m+8PvW9wk3/bRq5+mRo5Dqo6+vXb7otCm3TPhYOJqOaQtGU9YMWQSL3krMnoOxMr0+9E6F3Ug==", "cpu": [ "x64" ], @@ -3503,9 +3977,9 @@ } }, "node_modules/@stoplight/spectral-core": { - "version": "1.19.5", - "resolved": "https://registry.npmjs.org/@stoplight/spectral-core/-/spectral-core-1.19.5.tgz", - "integrity": "sha512-i+njdliW7bAHGsHEgDvH0To/9IxiYiBELltkZ7ASVy4i+WXtZ40lQXpeRQRwePrBcSgQl0gcZFuKX10nmSHtbw==", + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/@stoplight/spectral-core/-/spectral-core-1.20.0.tgz", + "integrity": "sha512-5hBP81nCC1zn1hJXL/uxPNRKNcB+/pEIHgCjPRpl/w/qy9yC9ver04tw1W0l/PMiv0UeB5dYgozXVQ4j5a6QQQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -3593,17 +4067,6 @@ "ajv": "^8.0.1" } }, - "node_modules/@stoplight/spectral-core/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - 
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, "node_modules/@stoplight/spectral-core/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", @@ -3611,19 +4074,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@stoplight/spectral-core/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/@stoplight/spectral-formats": { "version": "1.8.2", "resolved": "https://registry.npmjs.org/@stoplight/spectral-formats/-/spectral-formats-1.8.2.tgz", @@ -3641,9 +4091,9 @@ } }, "node_modules/@stoplight/spectral-functions": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@stoplight/spectral-functions/-/spectral-functions-1.9.4.tgz", - "integrity": "sha512-+dgu7QQ1JIZFsNLhNbQLPA9tniIT3KjOc9ORv0LYSCLvZjkWT2bN7vgmathbXsbmhnmhvl15H9sRqUIqzi+qoQ==", + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@stoplight/spectral-functions/-/spectral-functions-1.10.1.tgz", + "integrity": "sha512-obu8ZfoHxELOapfGsCJixKZXZcffjg+lSoNuttpmUFuDzVLT3VmH8QkPXfOGOL5Pz80BR35ClNAToDkdnYIURg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -3966,6 +4416,13 @@ "tslib": "^1.9.3" } }, + "node_modules/@ts-common/fs/node_modules/@types/node": { + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "dev": 
true, + "license": "MIT" + }, "node_modules/@ts-common/fs/node_modules/tslib": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", @@ -4117,10 +4574,10 @@ } } }, - "node_modules/@tsconfig/strictest": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@tsconfig/strictest/-/strictest-2.0.5.tgz", - "integrity": "sha512-ec4tjL2Rr0pkZ5hww65c+EEPYwxOi4Ryv+0MtjeaSQRJyq322Q27eOQiFbuNgw2hpL4hB1/W/HBGk3VKS43osg==", + "node_modules/@tsconfig/node20": { + "version": "20.1.6", + "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.6.tgz", + "integrity": "sha512-sz+Hqx9zwZDpZIV871WSbUzSqNIsXzghZydypnfgzPKLltVJfkINfUeTct31n/tTSa9ZE1ZOfKdRre1uHHquYQ==", "dev": true, "license": "MIT" }, @@ -4134,6 +4591,16 @@ "@types/retry": "*" } }, + "node_modules/@types/chai": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, "node_modules/@types/commonmark": { "version": "0.27.9", "resolved": "https://registry.npmjs.org/@types/commonmark/-/commonmark-0.27.9.tgz", @@ -4141,6 +4608,23 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/es-aggregate-error": { 
"version": "1.0.6", "resolved": "https://registry.npmjs.org/@types/es-aggregate-error/-/es-aggregate-error-1.0.6.tgz", @@ -4152,16 +4636,23 @@ } }, "node_modules/@types/estree": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", - "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, "node_modules/@types/js-yaml": { - "version": "3.12.10", - "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-3.12.10.tgz", - "integrity": "sha512-/Mtaq/wf+HxXpvhzFYzrzCqNRcA958sW++7JOFC8nPrZcvfi/TrzOaaGbvt27ltJB2NQbHVAg5a1wUCsyMH7NA==", + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", + "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-diff": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@types/json-diff/-/json-diff-1.0.3.tgz", + "integrity": "sha512-Qvxm8fpRMv/1zZR3sQWImeRK2mBYJji20xF51Fq9Gt//Ed18u0x6/FNLogLS1xhfUWTEmDyqveJqn95ltB6Kvw==", "dev": true, "license": "MIT" }, @@ -4173,19 +4664,29 @@ "license": "MIT" }, "node_modules/@types/lodash": { - "version": "4.17.16", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.16.tgz", - "integrity": "sha512-HX7Em5NYQAXKW+1T+FiuG27NGwzJfCX3s1GjOa7ujxZa52kjJLOr4FUxT+giF6Tgxv1e+/czV/iTtBw27WTU9g==", + "version": "4.17.19", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.19.tgz", + "integrity": "sha512-NYqRyg/hIQrYPT9lbOeYc3kIRabJDn/k4qQHIXUpx88CBDww2fD15Sg5kbXlW86zm2XEW4g0QxkTI3/Kfkc7xQ==", "dev": true, "license": "MIT" }, - 
"node_modules/@types/node": { - "version": "10.17.60", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", - "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", "dev": true, "license": "MIT" }, + "node_modules/@types/node": { + "version": "20.19.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.1.tgz", + "integrity": "sha512-jJD50LtlD2dodAEO653i3YF04NWak6jN3ky+Ri3Em3mGR39/glWiboM/IePaRbgwSfqM1TpGXfAg8ohn/4dTgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, "node_modules/@types/node-fetch": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz", @@ -4197,13 +4698,6 @@ "form-data": "^4.0.0" } }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/retry": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.5.tgz", @@ -4253,21 +4747,21 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.28.0.tgz", - "integrity": "sha512-lvFK3TCGAHsItNdWZ/1FkvpzCxTHUVuFrdnOGLMa0GGCFIbCgQWVk3CzCGdA7kM3qGVc+dfW9tr0Z/sHnGDFyg==", + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.35.0.tgz", + "integrity": 
"sha512-ijItUYaiWuce0N1SoSMrEd0b6b6lYkYt99pqCPfybd+HKVXtEvYhICfLdwp42MhiI5mp0oq7PKEL+g1cNiz/Eg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.28.0", - "@typescript-eslint/type-utils": "8.28.0", - "@typescript-eslint/utils": "8.28.0", - "@typescript-eslint/visitor-keys": "8.28.0", + "@typescript-eslint/scope-manager": "8.35.0", + "@typescript-eslint/type-utils": "8.35.0", + "@typescript-eslint/utils": "8.35.0", + "@typescript-eslint/visitor-keys": "8.35.0", "graphemer": "^1.4.0", - "ignore": "^5.3.1", + "ignore": "^7.0.0", "natural-compare": "^1.4.0", - "ts-api-utils": "^2.0.1" + "ts-api-utils": "^2.1.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4277,15 +4771,15 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "@typescript-eslint/parser": "^8.35.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", "dev": true, "license": "MIT", "engines": { @@ -4293,16 +4787,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.28.0.tgz", - "integrity": "sha512-LPcw1yHD3ToaDEoljFEfQ9j2xShY367h7FZ1sq5NJT9I3yj4LHer1Xd1yRSOdYy9BpsrxU7R+eoDokChYM53lQ==", + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.35.0.tgz", + "integrity": 
"sha512-6sMvZePQrnZH2/cJkwRpkT7DxoAWh+g6+GFRK6bV3YQo7ogi3SX5rgF6099r5Q53Ma5qeT7LGmOmuIutF4t3lA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.28.0", - "@typescript-eslint/types": "8.28.0", - "@typescript-eslint/typescript-estree": "8.28.0", - "@typescript-eslint/visitor-keys": "8.28.0", + "@typescript-eslint/scope-manager": "8.35.0", + "@typescript-eslint/types": "8.35.0", + "@typescript-eslint/typescript-estree": "8.35.0", + "@typescript-eslint/visitor-keys": "8.35.0", "debug": "^4.3.4" }, "engines": { @@ -4317,35 +4811,16 @@ "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.28.0.tgz", - "integrity": "sha512-u2oITX3BJwzWCapoZ/pXw6BCOl8rJP4Ij/3wPoGvY8XwvXflOzd1kLrDUUUAIEdJSFh+ASwdTHqtan9xSg8buw==", + "node_modules/@typescript-eslint/project-service": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.35.0.tgz", + "integrity": "sha512-41xatqRwWZuhUMF/aZm2fcUsOFKNcG28xqRSS6ZVr9BVJtGExosLAm5A1OxTjRMagx8nJqva+P5zNIGt8RIgbQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.28.0", - "@typescript-eslint/visitor-keys": "8.28.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.28.0.tgz", - "integrity": "sha512-oRoXu2v0Rsy/VoOGhtWrOKDiIehvI+YNrDk5Oqj40Mwm0Yt01FC/Q7nFqg088d3yAsR1ZcZFVfPCTTFCe/KPwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/typescript-estree": "8.28.0", - "@typescript-eslint/utils": "8.28.0", - "debug": "^4.3.4", - "ts-api-utils": 
"^2.0.1" + "@typescript-eslint/tsconfig-utils": "^8.35.0", + "@typescript-eslint/types": "^8.35.0", + "debug": "^4.3.4" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4355,16 +4830,19 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/types": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.28.0.tgz", - "integrity": "sha512-bn4WS1bkKEjx7HqiwG2JNB3YJdC1q6Ue7GyGlwPHyt0TnVq6TtD/hiOdTZt71sq0s7UzqBFXD8t8o2e63tXgwA==", + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.35.0.tgz", + "integrity": "sha512-+AgL5+mcoLxl1vGjwNfiWq5fLDZM1TmTPYs2UkyHfFhgERxBbqHlNjRzhThJqz+ktBqTChRYY6zwbMwy0591AA==", "dev": true, "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.35.0", + "@typescript-eslint/visitor-keys": "8.35.0" + }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -4373,22 +4851,12 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.28.0.tgz", - "integrity": "sha512-H74nHEeBGeklctAVUvmDkxB1mk+PAZ9FiOMPFncdqeRBXxk1lWSYraHw8V12b7aa6Sg9HOBNbGdSHobBPuQSuA==", + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.35.0.tgz", + "integrity": "sha512-04k/7247kZzFraweuEirmvUj+W3bJLI9fX6fbo1Qm2YykuBvEhRTPl8tcxlYO8kZZW+HIXfkZNoasVb8EV4jpA==", "dev": true, "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.28.0", - "@typescript-eslint/visitor-keys": "8.28.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": 
"^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.0.1" - }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -4400,33 +4868,17 @@ "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@typescript-eslint/utils": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.28.0.tgz", - "integrity": "sha512-OELa9hbTYciYITqgurT1u/SzpQVtDLmQMFzy/N8pQE+tefOyCWT79jHsav294aTqV1q1u+VzqDGbuujvRYaeSQ==", + "node_modules/@typescript-eslint/type-utils": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.35.0.tgz", + "integrity": "sha512-ceNNttjfmSEoM9PW87bWLDEIaLAyR+E6BoYJQ5PfaDau37UGca9Nyq3lBk8Bw2ad0AKvYabz6wxc7DMTO2jnNA==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.28.0", - "@typescript-eslint/types": "8.28.0", - "@typescript-eslint/typescript-estree": "8.28.0" + "@typescript-eslint/typescript-estree": "8.35.0", + "@typescript-eslint/utils": "8.35.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -4440,16 +4892,12 @@ "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.28.0.tgz", - "integrity": 
"sha512-hbn8SZ8w4u2pRwgQ1GlUrPKE+t2XvcCW5tTRF7j6SMYIuYG37XuzIW44JCZPa36evi0Oy2SnM664BlIaAuQcvg==", + "node_modules/@typescript-eslint/types": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.35.0.tgz", + "integrity": "sha512-0mYH3emanku0vHw2aRLNGqe7EXh9WHEhi7kZzscrMDf6IIRUQ5Jk4wp1QrledE/36KtdZrVfKnE32eZCf/vaVQ==", "dev": true, "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.28.0", - "eslint-visitor-keys": "^4.2.0" - }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -4458,370 +4906,147 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typespec/asset-emitter": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/asset-emitter/-/asset-emitter-0.67.1.tgz", - "integrity": "sha512-yLa6FhDbPGbFRYOfmKMB0P1O5+BsOsmzWvRdeZWcNMjJFUhWLDZLAzva3kRx9jBAoIGA5+jNBjcW8VRNxEhqvg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=20.0.0" - }, - "peerDependencies": { - "@typespec/compiler": "^0.67.1" - } - }, - "node_modules/@typespec/compiler": { - "version": "0.67.2", - "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-0.67.2.tgz", - "integrity": "sha512-6c47359nR6IjI4fYq+0hi1mm9GMdHQ/LdqPa/roKg1wQaBohUMBJXW7duMDcz2BTorYjoEBYalz9olMG4oqZDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "~7.26.2", - "@inquirer/prompts": "^7.3.1", - "ajv": "~8.17.1", - "change-case": "~5.4.4", - "env-paths": "^3.0.0", - "globby": "~14.1.0", - "is-unicode-supported": "^2.1.0", - "mustache": "~4.2.0", - "picocolors": "~1.1.1", - "prettier": "~3.5.3", - "semver": "^7.7.1", - "tar": "^7.4.3", - "temporal-polyfill": "^0.2.5", - "vscode-languageserver": "~9.0.1", - "vscode-languageserver-textdocument": "~1.0.12", - "yaml": "~2.7.0", - "yargs": "~17.7.2" - }, - "bin": { - "tsp": "cmd/tsp.js", - "tsp-server": "cmd/tsp-server.js" - }, - "engines": { - "node": ">=20.0.0" - } - }, - 
"node_modules/@typespec/compiler/node_modules/@inquirer/checkbox": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.1.4.tgz", - "integrity": "sha512-d30576EZdApjAMceijXA5jDzRQHT/MygbC+J8I7EqA6f/FRpYxlRtRJbHF8gHeWYeSdOuTEJqonn7QLB1ELezA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/figures": "^1.0.11", - "@inquirer/type": "^3.0.5", - "ansi-escapes": "^4.3.2", - "yoctocolors-cjs": "^2.1.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@typespec/compiler/node_modules/@inquirer/confirm": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.8.tgz", - "integrity": "sha512-dNLWCYZvXDjO3rnQfk2iuJNL4Ivwz/T2+C3+WnNfJKsNGSuOs3wAo2F6e0p946gtSAk31nZMfW+MRmYaplPKsg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@typespec/compiler/node_modules/@inquirer/core": { - "version": "10.1.9", - "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.1.9.tgz", - "integrity": "sha512-sXhVB8n20NYkUBfDYgizGHlpRVaCRjtuzNZA6xpALIUbkgfd2Hjz+DfEN6+h1BRnuxw0/P4jCIMjMsEOAMwAJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/figures": "^1.0.11", - "@inquirer/type": "^3.0.5", - "ansi-escapes": "^4.3.2", - "cli-width": "^4.1.0", - "mute-stream": "^2.0.0", - "signal-exit": "^4.1.0", - "wrap-ansi": "^6.2.0", - "yoctocolors-cjs": "^2.1.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - 
"node_modules/@typespec/compiler/node_modules/@inquirer/editor": { - "version": "4.2.9", - "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.9.tgz", - "integrity": "sha512-8HjOppAxO7O4wV1ETUlJFg6NDjp/W2NP5FB9ZPAcinAlNT4ZIWOLe2pUVwmmPRSV0NMdI5r/+lflN55AwZOKSw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5", - "external-editor": "^3.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@typespec/compiler/node_modules/@inquirer/expand": { - "version": "4.0.11", - "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.11.tgz", - "integrity": "sha512-OZSUW4hFMW2TYvX/Sv+NnOZgO8CHT2TU1roUCUIF2T+wfw60XFRRp9MRUPCT06cRnKL+aemt2YmTWwt7rOrNEA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5", - "yoctocolors-cjs": "^2.1.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@typespec/compiler/node_modules/@inquirer/input": { - "version": "4.1.8", - "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.1.8.tgz", - "integrity": "sha512-WXJI16oOZ3/LiENCAxe8joniNp8MQxF6Wi5V+EBbVA0ZIOpFcL4I9e7f7cXse0HJeIPCWO8Lcgnk98juItCi7Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@typespec/compiler/node_modules/@inquirer/number": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.11.tgz", - "integrity": 
"sha512-pQK68CsKOgwvU2eA53AG/4npRTH2pvs/pZ2bFvzpBhrznh8Mcwt19c+nMO7LHRr3Vreu1KPhNBF3vQAKrjIulw==", + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.35.0.tgz", + "integrity": "sha512-F+BhnaBemgu1Qf8oHrxyw14wq6vbL8xwWKKMwTMwYIRmFFY/1n/9T/jpbobZL8vp7QyEUcC6xGrnAO4ua8Kp7w==", "dev": true, "license": "MIT", "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5" + "@typescript-eslint/project-service": "8.35.0", + "@typescript-eslint/tsconfig-utils": "8.35.0", + "@typescript-eslint/types": "8.35.0", + "@typescript-eslint/visitor-keys": "8.35.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" }, "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } - } - }, - "node_modules/@typespec/compiler/node_modules/@inquirer/password": { - "version": "4.0.11", - "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.11.tgz", - "integrity": "sha512-dH6zLdv+HEv1nBs96Case6eppkRggMe8LoOTl30+Gq5Wf27AO/vHFgStTVz4aoevLdNXqwE23++IXGw4eiOXTg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5", - "ansi-escapes": "^4.3.2" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, - "engines": { - "node": ">=18" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typespec/compiler/node_modules/@inquirer/prompts": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.4.0.tgz", - "integrity": 
"sha512-EZiJidQOT4O5PYtqnu1JbF0clv36oW2CviR66c7ma4LsupmmQlUwmdReGKRp456OWPWMz3PdrPiYg3aCk3op2w==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { - "@inquirer/checkbox": "^4.1.4", - "@inquirer/confirm": "^5.1.8", - "@inquirer/editor": "^4.2.9", - "@inquirer/expand": "^4.0.11", - "@inquirer/input": "^4.1.8", - "@inquirer/number": "^3.0.11", - "@inquirer/password": "^4.0.11", - "@inquirer/rawlist": "^4.0.11", - "@inquirer/search": "^3.0.11", - "@inquirer/select": "^4.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "balanced-match": "^1.0.0" } }, - "node_modules/@typespec/compiler/node_modules/@inquirer/rawlist": { - "version": "4.0.11", - "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.0.11.tgz", - "integrity": "sha512-uAYtTx0IF/PqUAvsRrF3xvnxJV516wmR6YVONOmCWJbbt87HcDHLfL9wmBQFbNJRv5kCjdYKrZcavDkH3sVJPg==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, - "license": "MIT", + "license": "ISC", "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/type": "^3.0.5", - "yoctocolors-cjs": "^2.1.2" + "brace-expansion": "^2.0.1" }, "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" } }, - "node_modules/@typespec/compiler/node_modules/@inquirer/search": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.0.11.tgz", - "integrity": "sha512-9CWQT0ikYcg6Ls3TOa7jljsD7PgjcsYEM0bYE+Gkz+uoW9u8eaJCRHJKkucpRE5+xKtaaDbrND+nPDoxzjYyew==", + "node_modules/@typescript-eslint/utils": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.35.0.tgz", + "integrity": "sha512-nqoMu7WWM7ki5tPgLVsmPM8CkqtoPUG6xXGeefM5t4x3XumOEKMoUZPdi+7F+/EotukN4R9OWdmDxN80fqoZeg==", "dev": true, "license": "MIT", "dependencies": { - "@inquirer/core": "^10.1.9", - "@inquirer/figures": "^1.0.11", - "@inquirer/type": "^3.0.5", - "yoctocolors-cjs": "^2.1.2" + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.35.0", + "@typescript-eslint/types": "8.35.0", + "@typescript-eslint/typescript-estree": "8.35.0" }, "engines": { - "node": ">=18" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, - "peerDependencies": { - "@types/node": ">=18" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typespec/compiler/node_modules/@inquirer/select": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.1.0.tgz", - "integrity": "sha512-z0a2fmgTSRN+YBuiK1ROfJ2Nvrpij5lVN3gPDkQGhavdvIVGHGW29LwYZfM/j42Ai2hUghTI/uoBuTbrJk42bA==", + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.35.0.tgz", + "integrity": "sha512-zTh2+1Y8ZpmeQaQVIc/ZZxsx8UzgKJyNg1PTvjzC7WMhPSVS8bfDX34k1SrwOf016qd5RU3az2UxUNue3IfQ5g==", "dev": true, "license": "MIT", "dependencies": { - 
"@inquirer/core": "^10.1.9", - "@inquirer/figures": "^1.0.11", - "@inquirer/type": "^3.0.5", - "ansi-escapes": "^4.3.2", - "yoctocolors-cjs": "^2.1.2" + "@typescript-eslint/types": "8.35.0", + "eslint-visitor-keys": "^4.2.1" }, "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@types/node": ">=18" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typespec/compiler/node_modules/@inquirer/type": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.5.tgz", - "integrity": "sha512-ZJpeIYYueOz/i/ONzrfof8g89kNdO2hjGuvULROo3O8rlB2CRtSseE5KeirnyE4t/thAn/EwvS/vuQeJCn+NZg==", + "node_modules/@typespec/asset-emitter": { + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/asset-emitter/-/asset-emitter-0.71.0.tgz", + "integrity": "sha512-wXDF2kbEPTJksv16mzcEyaz97PUxz1xH/Bl4OFSnvwE5xC1hkb0uKQ2nsunnu4yFzbz6Jmn7aoxM1WlYR5PzkA==", "dev": true, "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20.0.0" }, "peerDependencies": { - "@types/node": ">=18" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "@typespec/compiler": "^1.1.0" } }, - "node_modules/@typespec/compiler/node_modules/@types/node": { - "version": "22.13.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.13.tgz", - "integrity": "sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==", + "node_modules/@typespec/compiler": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-1.1.0.tgz", + "integrity": "sha512-dtwosIqd2UUEEIVBR+oDiUtN4n1lP8/9GxQVno+wbkijQgKDj4Hg0Vaq6HG4BduF7RptDdtzkdGQCS9CgOIdRA==", "dev": true, "license": "MIT", - "optional": true, - "peer": true, "dependencies": { - "undici-types": "~6.20.0" + 
"@babel/code-frame": "~7.27.1", + "@inquirer/prompts": "^7.4.0", + "ajv": "~8.17.1", + "change-case": "~5.4.4", + "env-paths": "^3.0.0", + "globby": "~14.1.0", + "is-unicode-supported": "^2.1.0", + "mustache": "~4.2.0", + "picocolors": "~1.1.1", + "prettier": "~3.5.3", + "semver": "^7.7.1", + "tar": "^7.4.3", + "temporal-polyfill": "^0.3.0", + "vscode-languageserver": "~9.0.1", + "vscode-languageserver-textdocument": "~1.0.12", + "yaml": "~2.7.0", + "yargs": "~17.7.2" + }, + "bin": { + "tsp": "cmd/tsp.js", + "tsp-server": "cmd/tsp-server.js" + }, + "engines": { + "node": ">=20.0.0" } }, "node_modules/@typespec/compiler/node_modules/ajv": { @@ -4841,16 +5066,6 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/@typespec/compiler/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/@typespec/compiler/node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", @@ -4858,68 +5073,44 @@ "dev": true, "license": "MIT" }, - "node_modules/@typespec/compiler/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@typespec/compiler/node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": 
"sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true - }, - "node_modules/@typespec/compiler/node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "node_modules/@typespec/compiler/node_modules/yaml": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.1.tgz", + "integrity": "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==", "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" + "license": "ISC", + "bin": { + "yaml": "bin.mjs" }, "engines": { - "node": ">=8" + "node": ">= 14" } }, "node_modules/@typespec/events": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/events/-/events-0.67.1.tgz", - "integrity": "sha512-4pd/FEd+y72h2eUOlwGavK+nv3SDp7ZUJkGTcARyjLH5aSIAOl4uYW+WzQjGJylu/9t+xmoHy47siOvYBxONkQ==", + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/events/-/events-0.71.0.tgz", + "integrity": "sha512-dJeyqBGqTTSlFDVWpdqeMjDpEyRmenH3yDABK3T/30MrO94sdXigxmeBnPCcOaaqst6pV3anFuKwfAqEN3GnbA==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1" + "@typespec/compiler": "^1.1.0" } }, "node_modules/@typespec/http": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/http/-/http-0.67.1.tgz", - "integrity": "sha512-pkLFdKLA5ObCptUuwL8mhiy6EqVbqmtvHK89zqiTfYYGw2qm76+EUHaK0P/g2aAmjcwlrDGhJ0EhzbVp87H0mg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@typespec/http/-/http-1.1.0.tgz", + "integrity": 
"sha512-1doVGmkv3N8l57fVuci4jGMZ61EZBlDzuNZO2b9o0+mexCOs/P96CIpFkaNVvTQgjpyFsW1DlXiUKAvUC9zQfg==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1", - "@typespec/streams": "^0.67.1" + "@typespec/compiler": "^1.1.0", + "@typespec/streams": "^0.71.0" }, "peerDependenciesMeta": { "@typespec/streams": { @@ -4928,28 +5119,28 @@ } }, "node_modules/@typespec/openapi": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-0.67.1.tgz", - "integrity": "sha512-9/122dHw6ZA+laqHM1mqa0CWxg0lBhEqdVX74YoAOlE+NR2wIpUwwC4WIVTvIllDIl6hwV+zVgILtbvD8W5+1A==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-1.1.0.tgz", + "integrity": "sha512-HPvrpSS7eSVk3fEkWndcDTrAZssWRYv3FyDTqVqljildc7FAiXdo88+r5CCK8endmgIrES7uJdHLkcIGUZx1pg==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1", - "@typespec/http": "^0.67.1" + "@typespec/compiler": "^1.1.0", + "@typespec/http": "^1.1.0" } }, "node_modules/@typespec/openapi3": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/openapi3/-/openapi3-0.67.1.tgz", - "integrity": "sha512-Qe6kRQmet3bK2VPAK3IDK3JA0s1pNCJpDmrbBRCKxstvYWGwdTEVjiWRJl4kn17i4T51bxUrORKd9Si6R4M7Kg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@typespec/openapi3/-/openapi3-1.1.0.tgz", + "integrity": "sha512-+1Ue7+M/PkNX54H6SJAym5ONHzlW7s5ZnA4fCH5jwKvalvI94stMvefOpd8FAesJDVmXc3wZ0kiqYo5EuMTjOQ==", "dev": true, "license": "MIT", "dependencies": { "@apidevtools/swagger-parser": "~10.1.1", - "@typespec/asset-emitter": "^0.67.1", + "@typespec/asset-emitter": "^0.71.0", "openapi-types": "~12.1.3", "yaml": "~2.7.0" }, @@ -4960,11 +5151,11 @@ "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1", - "@typespec/http": "^0.67.1", - "@typespec/json-schema": "^0.67.1", - "@typespec/openapi": 
"^0.67.1", - "@typespec/versioning": "^0.67.1" + "@typespec/compiler": "^1.1.0", + "@typespec/http": "^1.1.0", + "@typespec/json-schema": "^1.1.0", + "@typespec/openapi": "^1.1.0", + "@typespec/versioning": "^0.71.0" }, "peerDependenciesMeta": { "@typespec/json-schema": { @@ -4978,10 +5169,23 @@ } } }, + "node_modules/@typespec/openapi3/node_modules/yaml": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.1.tgz", + "integrity": "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/@typespec/prettier-plugin-typespec": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/prettier-plugin-typespec/-/prettier-plugin-typespec-0.67.1.tgz", - "integrity": "sha512-Zdty90MRA7CINwc7Sy4HC/s7AZN+Cw2vmyrl0GiT3ZvtVTCAUYabEOXvcWEolL2BfSryikzUZF4cGo7DaYBTgA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@typespec/prettier-plugin-typespec/-/prettier-plugin-typespec-1.1.0.tgz", + "integrity": "sha512-Wi2ShQdbSXDq+sysguTXq4o1vVIQNZreyy0COVL3DnMZbq92TH0d9dtexOqEJsNsCbfx4RFD5il4dAbSX1/88w==", "dev": true, "license": "MIT", "dependencies": { @@ -4989,83 +5193,133 @@ } }, "node_modules/@typespec/rest": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.67.1.tgz", - "integrity": "sha512-19IzFoaM0yFBSXpfrJgZEBVXtvEkMEprKc5B0kF4ylEPs32ShtZj05BXYrAkmMZbCsk0AC/VZdmVgcWP+AT6GQ==", + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.71.0.tgz", + "integrity": "sha512-5qX+nWO5Jx4P1iTTT2REgdCtHsTMjlv/gL90u8cO1ih3yHDtf18a41UL6jSYaVUIvIj6rlmrgopActf0FhhUcw==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1", - "@typespec/http": "^0.67.1" + "@typespec/compiler": "^1.1.0", + "@typespec/http": "^1.1.0" } }, 
"node_modules/@typespec/sse": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/sse/-/sse-0.67.1.tgz", - "integrity": "sha512-Y7O002u89nM55hc81/wadMG0+gnj9hr0i4icqOxjP7auWsYDwMoK7arxC+qM7tyyFGMgv/F0ZxNJmc2Ajq7kpQ==", + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/sse/-/sse-0.71.0.tgz", + "integrity": "sha512-4lAwDMj8h/50s6zp/8IX8CLW+H3P+od5O32Bb8+fyTabEo7+H3PbdBbIJGv9Sj7+l8+bZXsyfRXa+aJxR9o2ZA==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1", - "@typespec/events": "^0.67.1", - "@typespec/http": "^0.67.1", - "@typespec/streams": "^0.67.1" + "@typespec/compiler": "^1.1.0", + "@typespec/events": "^0.71.0", + "@typespec/http": "^1.1.0", + "@typespec/streams": "^0.71.0" } }, "node_modules/@typespec/streams": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/streams/-/streams-0.67.1.tgz", - "integrity": "sha512-it+WNzurrk+TEzLvqlbCreyATmSR/g61/YX/k1D+B/QThPv8bh2S1sQqKtUMeThCu4/MHhZL9xTtdxWcLww+lg==", + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/streams/-/streams-0.71.0.tgz", + "integrity": "sha512-ofyAcg8GnO6uTffGo00D6MMfRkqie4QtnUUSGNC1Bam2WG+wkeSG/huP0WNRT8GofzK1N0M6QqQwAW/vdq9ymQ==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1" + "@typespec/compiler": "^1.1.0" + } + }, + "node_modules/@typespec/ts-http-runtime": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@typespec/ts-http-runtime/-/ts-http-runtime-0.2.3.tgz", + "integrity": "sha512-oRhjSzcVjX8ExyaF8hC0zzTqxlVuRlgMHL/Bh4w3xB9+wjbm0FpXylVU/lBrn+kgphwYTrOk3tp+AVShGmlYCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@typespec/versioning": { - "version": "0.67.1", - "resolved": 
"https://registry.npmjs.org/@typespec/versioning/-/versioning-0.67.1.tgz", - "integrity": "sha512-i1eZT8JlCthkRHJS3NH/nZTHUD7gJozP6pVy8wyHBx6TbnDOTfQ1P5YVlL2pF4ZdeRbGFhOKiUF/usEIOrkaVw==", + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.71.0.tgz", + "integrity": "sha512-8qknFLOpZTVzQ+SveXg9G7WJV8P80yxLlj0nOc3ZLBKiPgM6FF7vGWHRNtnh7s3gSXvWyxopaJ9fZSLZSJmbww==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1" + "@typespec/compiler": "^1.1.0" } }, "node_modules/@typespec/xml": { - "version": "0.67.1", - "resolved": "https://registry.npmjs.org/@typespec/xml/-/xml-0.67.1.tgz", - "integrity": "sha512-WDCxdtvlcUvD4AunpSje22Hb0BZzpluHATkx07/ru6HhdJsiwrc//IgGbV5eah9M6gK76sGXLicBLAFlxDfvDw==", + "version": "0.71.0", + "resolved": "https://registry.npmjs.org/@typespec/xml/-/xml-0.71.0.tgz", + "integrity": "sha512-IcBM4fd5li+hfaUoxeiFrUJx+gCGwIJ+LojdbAZPP3Kbdv12RS+8+CHH6d9qGV3qExgWGCny6WDUrUIaVCLonw==", "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" }, "peerDependencies": { - "@typespec/compiler": "^0.67.1" + "@typespec/compiler": "^1.1.0" + } + }, + "node_modules/@vitest/coverage-v8": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "@bcoe/v8-coverage": "^1.0.2", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.17", + "magicast": "^0.3.5", + "std-env": "^3.9.0", + "test-exclude": "^7.0.1", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + 
"peerDependencies": { + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" + }, + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } } }, "node_modules/@vitest/expect": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.9.tgz", - "integrity": "sha512-5eCqRItYgIML7NNVgJj6TVCmdzE7ZVgJhruW0ziSQV4V7PvLkDL1bBkBdcTs/VuIz0IxPb5da1IDSqc1TR9eig==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.9", - "@vitest/utils": "3.0.9", + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" }, @@ -5073,10 +5327,37 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, "node_modules/@vitest/pretty-format": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.9.tgz", - "integrity": "sha512-OW9F8t2J3AwFEwENg3yMyKWweF7oRJlMyHOMIhO5F3n0+cgQAJZBjNgrF8dLwFTEXl5jUqBLXd9QyyKv8zEcmA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": 
"sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", "dev": true, "license": "MIT", "dependencies": { @@ -5087,27 +5368,28 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.9.tgz", - "integrity": "sha512-NX9oUXgF9HPfJSwl8tUZCMP1oGx2+Sf+ru6d05QjzQz4OwWg0psEzwY6VexP2tTHWdOkhKHUIZH+fS6nA7jfOw==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.9", - "pathe": "^2.0.3" + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.9.tgz", - "integrity": "sha512-AiLUiuZ0FuA+/8i19mTYd+re5jqjEc2jZbgJ2up0VY0Ddyyxg/uUtBDpIFAy4uzKaQxOW8gMgBdAJJ2ydhu39A==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.9", + "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" }, @@ -5116,27 +5398,27 @@ } }, "node_modules/@vitest/spy": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.9.tgz", - "integrity": "sha512-/CcK2UDl0aQ2wtkp3YVWldrpLRNCfVcIOFGlVGKO4R5eajsH393Z1yiXLVQ7vWsj26JOEjeZI0x5sm5P4OGUNQ==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", "dev": 
true, "license": "MIT", "dependencies": { - "tinyspy": "^3.0.2" + "tinyspy": "^4.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.9.tgz", - "integrity": "sha512-ilHM5fHhZ89MCp5aAaM9uhfl1c2JdxVxl3McqsdVyVNN6JffnEen8UMCdRTzOhGXNQGo5GNL9QugHrz727Wnng==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.9", - "loupe": "^3.1.3", + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" }, "funding": { @@ -5157,9 +5439,9 @@ } }, "node_modules/acorn": { - "version": "8.14.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", - "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", "bin": { @@ -5179,19 +5461,6 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-walk": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", - "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "acorn": "^8.11.0" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/agent-base": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", @@ -5323,13 +5592,6 @@ "dev": true, "license": "MIT" }, - "node_modules/arg": { - "version": "4.1.3", - "resolved": 
"https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true, - "license": "MIT" - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -5376,6 +5638,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true, + "license": "MIT" + }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -5406,6 +5675,25 @@ "node": ">=12" } }, + "node_modules/ast-v8-to-istanbul": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.3.tgz", + "integrity": "sha512-MuXMrSLVVoA6sYN/6Hke18vMzrT4TZNbZIj/hvh0fnYFpO+/kFXcLIaiPwXXWaQUPg4yJD8fj+lfJ7/1EBconw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "estree-walker": "^3.0.3", + "js-tokens": "^9.0.1" + } + }, + "node_modules/ast-v8-to-istanbul/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/astring": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz", @@ -5451,9 +5739,9 @@ "license": "MIT" }, "node_modules/autorest": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/autorest/-/autorest-3.7.1.tgz", - "integrity": "sha512-6q17NtosQZPqBkIOUnaOPedf3PDIBF7Ha1iEGRhTqZF6TG2Q/1E3ID/D+ePIIzZDKvW01p/2pENq/oiBWH9IGQ==", + "version": "3.7.2", + "resolved": 
"https://registry.npmjs.org/autorest/-/autorest-3.7.2.tgz", + "integrity": "sha512-yEeF0tJjx2fROK9VWIVHKFiUSzD0cxwqnq7z+v7kIIRRZjyOM3rpBS9OPp6tQv5d3mmxPAUNh57G1ZumQNqQGg==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -5498,9 +5786,9 @@ "license": "MIT" }, "node_modules/axios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", - "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.10.0.tgz", + "integrity": "sha512-/1xYAC4MP/HEG+3duIhFr4ZQXR4sQXOIe+o6sdqzeykGLx6Upp/1p8MHqhINOvGeP7xyNHe7tsiJByc4SSVUxw==", "dev": true, "license": "MIT", "dependencies": { @@ -5559,13 +5847,14 @@ "license": "MIT" }, "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^1.0.0" + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" } }, "node_modules/braces": { @@ -5592,9 +5881,9 @@ } }, "node_modules/browserslist": { - "version": "4.24.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "version": "4.25.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", + "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", "dev": true, "funding": [ { @@ -5612,10 +5901,10 @@ ], 
"license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", + "caniuse-lite": "^1.0.30001726", + "electron-to-chromium": "^1.5.173", "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" + "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" @@ -5725,9 +6014,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001707", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001707.tgz", - "integrity": "sha512-3qtRjw/HQSMlDWf+X79N206fepf4SOOU6SQLMaq/0KkZLmSjPxAkBOQQ+FxbHKfHmYLZFfdWsO3KA90ceHPSnw==", + "version": "1.0.30001726", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001726.tgz", + "integrity": "sha512-VQAUIUzBiZ/UnlM28fSp2CRF3ivUn1BWEvxMcVTNwpw91Py1pGbPIyIKtd+tzct9C3ouceCVdGAXxZOpZAsgdw==", "dev": true, "funding": [ { @@ -5803,9 +6092,9 @@ } }, "node_modules/chardet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.0.0.tgz", - "integrity": "sha512-xVgPpulCooDjY6zH4m9YW3jbkaBe3FKIAvF5sj5t7aBNsVl2ljIE+xwJ4iNgiDZHFQvNIpjdKdVOQvvk5ZfxbQ==", + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true, "license": "MIT" }, @@ -5955,6 +6244,24 @@ "node": ">=8" } }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/color": { "version": "3.2.1", 
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", @@ -6057,6 +6364,16 @@ "node": "*" } }, + "node_modules/component-emitter": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.1.tgz", + "integrity": "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -6064,14 +6381,21 @@ "dev": true, "license": "MIT" }, + "node_modules/cookiejar": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", + "dev": true, + "license": "MIT" + }, "node_modules/core-js-compat": { - "version": "3.41.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.41.0.tgz", - "integrity": "sha512-RFsU9LySVue9RTwdDVX/T0e2Y6jRYWXERKElIjpuEOEnxaXffI0X7RUwVzfYLfzuLXSNJDYoRYUAmRUcyln20A==", + "version": "3.43.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.43.0.tgz", + "integrity": "sha512-2GML2ZsCc5LR7hZYz4AXmjQw8zuy2T//2QntwdnpuYI7jteT6GVYJL7F6C2C57R7gSYrcqVW3lAALefdbhBLDA==", "dev": true, "license": "MIT", "dependencies": { - "browserslist": "^4.24.4" + "browserslist": "^4.25.0" }, "funding": { "type": "opencollective", @@ -6085,12 +6409,24 @@ "dev": true, "license": "MIT" }, - "node_modules/create-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", - "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "node_modules/cross-env": { + "version": "7.0.3", + "resolved": 
"https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", + "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", "dev": true, - "license": "MIT" + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } }, "node_modules/cross-spawn": { "version": "7.0.6", @@ -6202,9 +6538,9 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "license": "MIT", "dependencies": { @@ -6229,13 +6565,6 @@ "node": ">=0.10.0" } }, - "node_modules/deep-diff": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/deep-diff/-/deep-diff-1.0.2.tgz", - "integrity": "sha512-aWS3UIVH+NPGCD1kki+DCU9Dua032iSsO43LqQpcs4R3+dVv7tX0qBGjiVHJHjplsoUM2XRO/KB92glqc68awg==", - "dev": true, - "license": "MIT" - }, "node_modules/deep-eql": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", @@ -6331,14 +6660,15 @@ "minimalistic-assert": "^1.0.0" } }, - "node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", "dev": 
true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.3.1" + "license": "ISC", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" } }, "node_modules/difflib": { @@ -6354,9 +6684,9 @@ } }, "node_modules/dotenv": { - "version": "16.4.7", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", - "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", "dev": true, "license": "BSD-2-Clause", "engines": { @@ -6366,6 +6696,18 @@ "url": "https://dotenvx.com" } }, + "node_modules/dreamopt": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/dreamopt/-/dreamopt-0.8.0.tgz", + "integrity": "sha512-vyJTp8+mC+G+5dfgsY+r3ckxlz+QMX40VjPQsZc5gxVAxLmi64TBoVkP54A/pRAXMXsbu2GMMBrZPxNv23waMg==", + "dev": true, + "dependencies": { + "wordwrap": ">=0.0.2" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -6400,9 +6742,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.5.123", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.123.tgz", - "integrity": "sha512-refir3NlutEZqlKaBLK0tzlVLe5P2wDKS7UQt/3SpibizgsRAPOsqQC3ffw1nlv3ze5gjRQZYHoPymgVZkplFA==", + "version": "1.5.177", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.177.tgz", + "integrity": "sha512-7EH2G59nLsEMj97fpDuvVcYi6lwTcM1xuWw3PssD8xzboAW7zj7iB3COEEEATUfjLHrs5uKBLQT03V/8URx06g==", "dev": true, "license": "ISC" }, @@ -6448,9 +6790,9 @@ "license": "MIT" }, "node_modules/es-abstract": { - "version": "1.23.9", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.9.tgz", - "integrity": 
"sha512-py07lI0wjxAC/DcfK1S6G7iANonniZwTISvdPzk9hzeH0IZIshbuuFxLIU96OyF89Yb9hiqWn8M/bY83KY5vzA==", + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", "dev": true, "license": "MIT", "dependencies": { @@ -6458,18 +6800,18 @@ "arraybuffer.prototype.slice": "^1.0.4", "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", - "call-bound": "^1.0.3", + "call-bound": "^1.0.4", "data-view-buffer": "^1.0.2", "data-view-byte-length": "^1.0.2", "data-view-byte-offset": "^1.0.1", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", + "es-object-atoms": "^1.1.1", "es-set-tostringtag": "^2.1.0", "es-to-primitive": "^1.3.0", "function.prototype.name": "^1.1.8", - "get-intrinsic": "^1.2.7", - "get-proto": "^1.0.0", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", "get-symbol-description": "^1.1.0", "globalthis": "^1.0.4", "gopd": "^1.2.0", @@ -6481,21 +6823,24 @@ "is-array-buffer": "^3.0.5", "is-callable": "^1.2.7", "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", "is-regex": "^1.2.1", + "is-set": "^2.0.3", "is-shared-array-buffer": "^1.0.4", "is-string": "^1.1.1", "is-typed-array": "^1.1.15", - "is-weakref": "^1.1.0", + "is-weakref": "^1.1.1", "math-intrinsics": "^1.1.0", - "object-inspect": "^1.13.3", + "object-inspect": "^1.13.4", "object-keys": "^1.1.1", "object.assign": "^4.1.7", "own-keys": "^1.0.1", - "regexp.prototype.flags": "^1.5.3", + "regexp.prototype.flags": "^1.5.4", "safe-array-concat": "^1.1.3", "safe-push-apply": "^1.0.0", "safe-regex-test": "^1.1.0", "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", "string.prototype.trim": "^1.2.10", "string.prototype.trimend": "^1.0.9", "string.prototype.trimstart": "^1.0.8", @@ -6504,7 +6849,7 @@ "typed-array-byte-offset": "^1.0.4", "typed-array-length": "^1.0.7", "unbox-primitive": "^1.1.0", - 
"which-typed-array": "^1.1.18" + "which-typed-array": "^1.1.19" }, "engines": { "node": ">= 0.4" @@ -6514,18 +6859,18 @@ } }, "node_modules/es-aggregate-error": { - "version": "1.0.13", - "resolved": "https://registry.npmjs.org/es-aggregate-error/-/es-aggregate-error-1.0.13.tgz", - "integrity": "sha512-KkzhUUuD2CUMqEc8JEqsXEMDHzDPE8RCjZeUBitsnB1eNcAJWQPiciKsMXe3Yytj4Flw1XLl46Qcf9OxvZha7A==", + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/es-aggregate-error/-/es-aggregate-error-1.0.14.tgz", + "integrity": "sha512-3YxX6rVb07B5TV11AV5wsL7nQCHXNwoHPsQC8S4AmBiqYhyNCJ5BRKXkXyDJvs8QzXN20NgRtxe3dEEQD9NLHA==", "dev": true, "license": "MIT", "dependencies": { "define-data-property": "^1.1.4", "define-properties": "^1.2.1", - "es-abstract": "^1.23.2", + "es-abstract": "^1.24.0", "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "globalthis": "^1.0.3", + "globalthis": "^1.0.4", "has-property-descriptors": "^1.0.2", "set-function-name": "^2.0.2" }, @@ -6557,9 +6902,9 @@ } }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, @@ -6611,9 +6956,9 @@ } }, "node_modules/esbuild": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", - "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz", + "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", "dev": true, 
"hasInstallScript": true, "license": "MIT", @@ -6624,31 +6969,31 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.1", - "@esbuild/android-arm": "0.25.1", - "@esbuild/android-arm64": "0.25.1", - "@esbuild/android-x64": "0.25.1", - "@esbuild/darwin-arm64": "0.25.1", - "@esbuild/darwin-x64": "0.25.1", - "@esbuild/freebsd-arm64": "0.25.1", - "@esbuild/freebsd-x64": "0.25.1", - "@esbuild/linux-arm": "0.25.1", - "@esbuild/linux-arm64": "0.25.1", - "@esbuild/linux-ia32": "0.25.1", - "@esbuild/linux-loong64": "0.25.1", - "@esbuild/linux-mips64el": "0.25.1", - "@esbuild/linux-ppc64": "0.25.1", - "@esbuild/linux-riscv64": "0.25.1", - "@esbuild/linux-s390x": "0.25.1", - "@esbuild/linux-x64": "0.25.1", - "@esbuild/netbsd-arm64": "0.25.1", - "@esbuild/netbsd-x64": "0.25.1", - "@esbuild/openbsd-arm64": "0.25.1", - "@esbuild/openbsd-x64": "0.25.1", - "@esbuild/sunos-x64": "0.25.1", - "@esbuild/win32-arm64": "0.25.1", - "@esbuild/win32-ia32": "0.25.1", - "@esbuild/win32-x64": "0.25.1" + "@esbuild/aix-ppc64": "0.25.5", + "@esbuild/android-arm": "0.25.5", + "@esbuild/android-arm64": "0.25.5", + "@esbuild/android-x64": "0.25.5", + "@esbuild/darwin-arm64": "0.25.5", + "@esbuild/darwin-x64": "0.25.5", + "@esbuild/freebsd-arm64": "0.25.5", + "@esbuild/freebsd-x64": "0.25.5", + "@esbuild/linux-arm": "0.25.5", + "@esbuild/linux-arm64": "0.25.5", + "@esbuild/linux-ia32": "0.25.5", + "@esbuild/linux-loong64": "0.25.5", + "@esbuild/linux-mips64el": "0.25.5", + "@esbuild/linux-ppc64": "0.25.5", + "@esbuild/linux-riscv64": "0.25.5", + "@esbuild/linux-s390x": "0.25.5", + "@esbuild/linux-x64": "0.25.5", + "@esbuild/netbsd-arm64": "0.25.5", + "@esbuild/netbsd-x64": "0.25.5", + "@esbuild/openbsd-arm64": "0.25.5", + "@esbuild/openbsd-x64": "0.25.5", + "@esbuild/sunos-x64": "0.25.5", + "@esbuild/win32-arm64": "0.25.5", + "@esbuild/win32-ia32": "0.25.5", + "@esbuild/win32-x64": "0.25.5" } }, "node_modules/escalade": { @@ -6675,20 +7020,20 @@ } }, "node_modules/eslint": { - 
"version": "9.23.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.23.0.tgz", - "integrity": "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==", + "version": "9.29.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.29.0.tgz", + "integrity": "sha512-GsGizj2Y1rCWDu6XoEekL3RLilp0voSePurjZIkxL3wlm5o5EC9VpgaP7lrCvjnkuLvzFBQWB3vWB3K5KQTveQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.2", - "@eslint/config-helpers": "^0.2.0", - "@eslint/core": "^0.12.0", + "@eslint/config-array": "^0.20.1", + "@eslint/config-helpers": "^0.2.1", + "@eslint/core": "^0.14.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.23.0", - "@eslint/plugin-kit": "^0.2.7", + "@eslint/js": "9.29.0", + "@eslint/plugin-kit": "^0.3.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -6699,9 +7044,9 @@ "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.3.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -6736,9 +7081,9 @@ } }, "node_modules/eslint-plugin-unicorn": { - "version": "58.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-58.0.0.tgz", - "integrity": "sha512-fc3iaxCm9chBWOHPVjn+Czb/wHS0D2Mko7wkOdobqo9R2bbFObc4LyZaLTNy0mhZOP84nKkLhTUQxlLOZ7EjKw==", + "version": "59.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-59.0.1.tgz", + "integrity": "sha512-EtNXYuWPUmkgSU2E7Ttn57LbRREQesIP1BiLn7OZLKodopKfDXfBUkC/0j6mpw2JExwf43Uf3qLSvrSvppgy8Q==", "dev": true, "license": "MIT", "dependencies": { @@ -6749,12 +7094,12 @@ "clean-regexp": "^1.0.0", 
"core-js-compat": "^3.41.0", "esquery": "^1.6.0", + "find-up-simple": "^1.0.1", "globals": "^16.0.0", "indent-string": "^5.0.0", "is-builtin-module": "^5.0.0", "jsesc": "^3.1.0", "pluralize": "^8.0.0", - "read-package-up": "^11.0.0", "regexp-tree": "^0.1.27", "regjsparser": "^0.12.0", "semver": "^7.7.1", @@ -6770,23 +7115,37 @@ "eslint": ">=9.22.0" } }, - "node_modules/eslint-plugin-unicorn/node_modules/globals": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-16.0.0.tgz", - "integrity": "sha512-iInW14XItCXET01CQFqudPOWP2jYMl7T+QRQT+UNcR/iQncN/F0UNpgd76iFkBPgNQb4+X3LV9tLJYzwh+Gl3A==", + "node_modules/eslint-plugin-unicorn/node_modules/@eslint/core": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.13.0.tgz", + "integrity": "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==", "dev": true, - "license": "MIT", + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, "engines": { - "node": ">=18" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/@eslint/plugin-kit": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.8.tgz", + "integrity": "sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.13.0", + "levn": "^0.4.1" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/eslint-scope": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", - "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", + "version": "8.4.0", + "resolved": 
"https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -6801,9 +7160,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -6813,17 +7172,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, "node_modules/eslint/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -6841,117 +7189,16 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/eslint/node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/eslint/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/eslint/node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/eslint/node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - 
}, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7041,24 +7288,24 @@ } }, "node_modules/execa": { - "version": "9.5.2", - "resolved": "https://registry.npmjs.org/execa/-/execa-9.5.2.tgz", - "integrity": "sha512-EHlpxMCpHWSAh1dgS6bVeoLAXGnJNdR93aabr4QCGbzOM73o5XmRfM/e5FUqsw3aagP8S8XEWUWFAxnRBnAF0Q==", + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-9.6.0.tgz", + "integrity": "sha512-jpWzZ1ZhwUmeWRhS7Qv3mhpOhLfwI+uAX4e5fOcXqwMR7EcJ0pj2kV1CVzHVMX/LphnKWD3LObjZCoJ71lKpHw==", "dev": true, "license": "MIT", "dependencies": { "@sindresorhus/merge-streams": "^4.0.0", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.6", "figures": "^6.1.0", "get-stream": "^9.0.0", - "human-signals": "^8.0.0", + "human-signals": "^8.0.1", 
"is-plain-obj": "^4.1.0", "is-stream": "^4.0.1", "npm-run-path": "^6.0.0", - "pretty-ms": "^9.0.0", + "pretty-ms": "^9.2.0", "signal-exit": "^4.1.0", "strip-final-newline": "^4.0.0", - "yoctocolors": "^2.0.0" + "yoctocolors": "^2.1.1" }, "engines": { "node": "^18.19.0 || >=20.5.0" @@ -7123,9 +7370,9 @@ } }, "node_modules/expect-type": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.0.tgz", - "integrity": "sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", + "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", "dev": true, "license": "Apache-2.0", "engines": { @@ -7154,26 +7401,6 @@ "node": ">=4" } }, - "node_modules/external-editor/node_modules/chardet": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", - "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", - "dev": true, - "license": "MIT" - }, - "node_modules/external-editor/node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -7208,6 +7435,19 @@ "node": ">=8.6.0" } }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", @@ -7229,6 +7469,13 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true, + "license": "MIT" + }, "node_modules/fast-uri": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", @@ -7328,30 +7575,6 @@ "proper-lockfile": "^1.2.0" } }, - "node_modules/file-js/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/file-js/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/file-js/node_modules/proper-lockfile": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-1.2.0.tgz", @@ -7420,6 +7643,29 @@ "unit-compare": "^1.0.1" } }, + "node_modules/filehound/node_modules/brace-expansion": 
{ + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filehound/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/filesize": { "version": "10.1.4", "resolved": "https://registry.npmjs.org/filesize/-/filesize-10.1.4.tgz", @@ -7444,17 +7690,20 @@ } }, "node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, "license": "MIT", "dependencies": { - "locate-path": "^5.0.0", + "locate-path": "^6.0.0", "path-exists": "^4.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/find-up-simple": { @@ -7483,18 +7732,11 @@ "engines": { "node": ">=16" } - }, - "node_modules/flat-cache/node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", - "dev": true, - "license": "ISC" - }, - "node_modules/flatted": { - 
"version": "3.2.6", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.6.tgz", - "integrity": "sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==", + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "dev": true, "license": "ISC" }, @@ -7577,15 +7819,16 @@ } }, "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.3.tgz", + "integrity": "sha512-qsITQPfmvMOSAdeyZ+12I1c+CKSstAFAwu+97zrnWAbIr5u8wfsExUzCesVLC8NgHuRUqNN4Zy6UPWUTRGslcA==", "dev": true, "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -7605,6 +7848,38 @@ "node": ">=12.20.0" } }, + "node_modules/formidable": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.5.tgz", + "integrity": "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@paralleldrive/cuid2": "^2.2.2", + "dezalgo": "^1.0.4", + "once": "^1.4.0", + "qs": "^6.11.0" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formidable/node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + 
"side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/front-matter": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/front-matter/-/front-matter-4.0.2.tgz", @@ -7767,6 +8042,19 @@ "node": "6.* || 8.* || >= 10.*" } }, + "node_modules/get-east-asian-width": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", + "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/get-intrinsic": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", @@ -7950,46 +8238,22 @@ } }, "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "6.0.2", + 
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^1.1.7" + "is-glob": "^4.0.3" }, "engines": { - "node": "*" + "node": ">=10.13.0" } }, "node_modules/globals": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", - "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.2.0.tgz", + "integrity": "sha512-O+7l9tPdHCU320IigZZPj5zmRCFG9xHmx9cU8FqU2Rp+JN714seHV+2S9+JslCpY4gJwU2vOGox0wzgae/MCEg==", "dev": true, "license": "MIT", "engines": { @@ -8037,6 +8301,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/globby/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -8064,6 +8338,16 @@ "dev": true, "license": "MIT" }, + "node_modules/graphlib": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", + "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash": "^4.17.15" + } + }, "node_modules/growly": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz", @@ -8219,26 +8503,6 @@ "dev": true, "license": "MIT" }, - "node_modules/hosted-git-info": { - "version": "7.0.2", - "resolved": 
"https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", - "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -8332,9 +8596,9 @@ } }, "node_modules/human-signals": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.0.tgz", - "integrity": "sha512-/1/GPCpDUCCYwlERiYjxoczfP0zfvZMU/OWgQPMya9AbAE24vseigFdhAMObpc8Q4lc/kjutPfUddDYyAmejnA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", + "integrity": "sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -8342,9 +8606,9 @@ } }, "node_modules/humanize-duration": { - "version": "3.32.1", - "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.32.1.tgz", - "integrity": "sha512-inh5wue5XdfObhu/IGEMiA1nUXigSGcaKNemcbLRKa7jXYGDZXr3LoT9pTIzq2hPEbld7w/qv9h+ikWGz8fL1g==", + "version": "3.33.0", + "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.33.0.tgz", + "integrity": "sha512-vYJX7BSzn7EQ4SaP2lPYVy+icHDppB6k7myNeI3wrSRfwMS5+BHyGgzpHR0ptqJ2AQ6UuIKrclSg5ve6Ci4IAQ==", "dev": true, "license": "Unlicense" }, @@ -8359,22 +8623,22 @@ } }, "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dev": true, "license": "MIT", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" + "safer-buffer": ">= 2.1.2 < 3" }, "engines": { "node": ">=0.10.0" } }, "node_modules/ignore": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", - "integrity": "sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, "license": "MIT", "engines": { @@ -8432,19 +8696,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/index-to-position": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/index-to-position/-/index-to-position-1.0.0.tgz", - "integrity": "sha512-sCO7uaLVhRJ25vz1o8s9IFM3nVS4DkuQnyjMwiQPKvQuBYBDmb8H7zx8ki7nVh4HJQOdVWebyvLE0qt+clruxA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -8725,6 +8976,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": 
"sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -8765,6 +9029,29 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "license": "MIT", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-primitive": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz", + "integrity": "sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-regex": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", @@ -8970,6 +9257,16 @@ "dev": true, "license": "ISC" }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", @@ -9002,22 +9299,6 @@ "node": ">=10" } }, - "node_modules/istanbul-lib-report/node_modules/make-dir": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", - "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/istanbul-lib-source-maps": { "version": "5.0.6", "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", @@ -9144,6 +9425,24 @@ "dev": true, "license": "MIT" }, + "node_modules/json-diff": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/json-diff/-/json-diff-1.0.6.tgz", + "integrity": "sha512-tcFIPRdlc35YkYdGxcamJjllUhXWv4n2rK9oJ2RsAzV4FBkuV4ojKEDgcZ+kpKxDmJKv+PFK65+1tVVOnSeEqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ewoudenberg/difflib": "0.1.0", + "colors": "^1.4.0", + "dreamopt": "~0.8.0" + }, + "bin": { + "json-diff": "bin/json-diff.js" + }, + "engines": { + "node": "*" + } + }, "node_modules/json-merge-patch": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json-merge-patch/-/json-merge-patch-1.0.2.tgz", @@ -9164,6 +9463,73 @@ "foreach": "^2.0.4" } }, + "node_modules/json-refs": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-3.0.15.tgz", + "integrity": "sha512-0vOQd9eLNBL18EGl5yYaO44GhixmImes2wiYn9Z3sag3QnehWrYWlB9AFtMxCL2Bj3fyxgDYkxGFEU/chlYssw==", + "dev": true, + "license": "MIT", + "dependencies": { + "commander": "~4.1.1", + "graphlib": "^2.1.8", + "js-yaml": "^3.13.1", + "lodash": "^4.17.15", + "native-promise-only": "^0.8.1", + "path-loader": "^1.0.10", + "slash": "^3.0.0", + "uri-js": "^4.2.2" + }, + "bin": { + "json-refs": "bin/json-refs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/json-refs/node_modules/argparse": { + "version": "1.0.10", + "resolved": 
"https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/json-refs/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/json-refs/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-refs/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/json-schema": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", @@ -9273,14 +9639,50 @@ "node": ">=8" } }, + "node_modules/junit-report-builder/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + 
}, + "node_modules/junit-report-builder/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "dev": true, "license": "MIT", - "dependencies": { - "json-buffer": "3.0.1" + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, "node_modules/kuler": { @@ -9325,16 +9727,19 @@ } }, "node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, "license": "MIT", "dependencies": { - "p-locate": "^4.1.0" + "p-locate": "^5.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/lodash": { @@ -9410,9 +9815,9 @@ } }, "node_modules/loupe": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", - "integrity": 
"sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.4.tgz", + "integrity": "sha512-wJzkKwJrheKtknCOKNEtDK4iqg/MxmZheEMtSTYvnzRdEYaZzmgH976nenp8WdJRdx5Vc1X/9MO0Oszl6ezeXg==", "dev": true, "license": "MIT" }, @@ -9459,42 +9864,25 @@ } }, "node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, "license": "MIT", "dependencies": { - "semver": "^6.0.0" + "semver": "^7.5.3" }, "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/make-dir/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true, - "license": "ISC" - }, "node_modules/marked": { - "version": "15.0.7", - "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.7.tgz", - "integrity": "sha512-dgLIeKGLx5FwziAnsk4ONoGwHwGPJzselimvlVskE9XLN4Orv9u2VA3GWw/lYUqjfA0rUT/6fqKwfZJapP9BEg==", + "version": "15.0.12", + "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.12.tgz", + 
"integrity": "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==", "dev": true, "license": "MIT", "bin": { @@ -9535,9 +9923,9 @@ "license": "MIT" }, "node_modules/memfs": { - "version": "4.17.0", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-4.17.0.tgz", - "integrity": "sha512-4eirfZ7thblFmqFjywlTmuWVSvccHAJbn1r8qQLzmTO11qcqpohOjmY2mFce6x7x7WtskzRqApPD0hv+Oa74jg==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-4.17.2.tgz", + "integrity": "sha512-NgYhCOWgovOXSzvYgUW0LQ7Qy72rWQMGGFJDoWg4G30RHd3z77VbYdtJ4fembJXBy8pMIUA31XNAupobOQlwdg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -9564,6 +9952,16 @@ "node": ">= 8" } }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/micromatch": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", @@ -9578,6 +9976,19 @@ "node": ">=8.6" } }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -9629,16 +10040,16 @@ "license": "ISC" }, "node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^2.0.1" + "brace-expansion": "^1.1.7" }, "engines": { - "node": ">=10" + "node": "*" } }, "node_modules/minimist": { @@ -9662,14 +10073,13 @@ } }, "node_modules/minizlib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", - "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", + "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", "dev": true, "license": "MIT", "dependencies": { - "minipass": "^7.0.4", - "rimraf": "^5.0.5" + "minipass": "^7.1.2" }, "engines": { "node": ">= 18" @@ -9744,6 +10154,13 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/native-promise-only": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", + "integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", + "dev": true, + "license": "MIT" + }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -9794,6 +10211,26 @@ "node": ">=16" } }, + "node_modules/newman/node_modules/chardet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.0.0.tgz", + "integrity": "sha512-xVgPpulCooDjY6zH4m9YW3jbkaBe3FKIAvF5sj5t7aBNsVl2ljIE+xwJ4iNgiDZHFQvNIpjdKdVOQvvk5ZfxbQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/newman/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/newman/node_modules/mkdirp": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", @@ -9886,6 +10323,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", "dev": true, "funding": [ { @@ -9970,21 +10408,6 @@ "dev": true, "license": "MIT" }, - "node_modules/normalize-package-data": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", - "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^7.0.0", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, "node_modules/npm-run-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", @@ -10026,9 +10449,9 @@ } }, "node_modules/oav": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/oav/-/oav-3.6.0.tgz", - "integrity": "sha512-MB8/suEE9f1jibuiy35F5v6kHTZV9aDEjUH3W48UrnekDHIynANGyCxgEl2gGepb0NyuVz9nlDLzd3Q4fAsVVg==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/oav/-/oav-3.6.2.tgz", + "integrity": "sha512-bOH69U424+lrZMAXLPe7Fh65wN3BSZgk4d8/veacmUJ2lmeY3Xe6LqnX1Z8X9LnsbdrZycvKTz5LJ6KNrEcQLA==", "dev": true, "license": "MIT", "dependencies": { @@ -10054,7 
+10477,7 @@ "json-merge-patch": "^1.0.2", "json-pointer": "^0.6.2", "json-schema-traverse": "^0.4.1", - "jsonpath-plus": "^10.2.0", + "jsonpath-plus": "^10.3.0", "junit-report-builder": "^3.0.0", "lodash": "^4.17.21", "md5-file": "^5.0.0", @@ -10158,6 +10581,62 @@ "wrap-ansi": "^6.2.0" } }, + "node_modules/oav/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/oav/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/oav/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/oav/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/oav/node_modules/strip-ansi": { "version": "6.0.1", 
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -10182,21 +10661,6 @@ "uuid": "bin/uuid" } }, - "node_modules/oav/node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/oav/node_modules/y18n": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", @@ -10369,32 +10833,35 @@ } }, "node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, "license": "MIT", "dependencies": { - "p-try": "^2.0.0" + "yocto-queue": "^0.1.0" }, "engines": { - "node": ">=6" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, "license": "MIT", "dependencies": { - "p-limit": "^2.2.0" + "p-limit": "^3.0.2" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { 
+ "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-try": { @@ -10427,37 +10894,6 @@ "node": ">=6" } }, - "node_modules/parse-json": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-8.2.0.tgz", - "integrity": "sha512-eONBZy4hm2AgxjNFd8a4nyDJnzUAH0g34xSQAwWEVGCjdZ4ZL7dKZBfq267GWP/JaS9zW62Xs2FeAdDvpHHJGQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.26.2", - "index-to-position": "^1.0.0", - "type-fest": "^4.37.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-json/node_modules/type-fest": { - "version": "4.38.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.38.0.tgz", - "integrity": "sha512-2dBz5D5ycHIoliLYLi0Q2V7KRaDlH0uWIvmk7TYlAg5slqwiPv1ezJdZm1QEM0xgk29oYWMCbIG7E6gHpvChlg==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", @@ -10498,6 +10934,17 @@ "node": ">=8" } }, + "node_modules/path-loader": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.12.tgz", + "integrity": "sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "native-promise-only": "^0.8.1", + "superagent": "^7.1.6" + } + }, "node_modules/path-scurry": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", @@ -10550,9 +10997,9 @@ "license": "MIT" }, "node_modules/pathval": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", - "integrity": 
"sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", "dev": true, "license": "MIT", "engines": { @@ -10617,9 +11064,9 @@ } }, "node_modules/postcss": { - "version": "8.5.3", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", - "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -10637,7 +11084,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -10714,6 +11161,19 @@ "node": ">=10" } }, + "node_modules/postman-collection/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/postman-collection/node_modules/semver": { "version": "7.6.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", @@ -10799,6 +11259,19 @@ "dev": true, "license": "MIT" }, + "node_modules/postman-runtime/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/postman-runtime/node_modules/postman-collection": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/postman-collection/-/postman-collection-4.4.0.tgz", @@ -10843,15 +11316,28 @@ "resolved": "https://registry.npmjs.org/postman-sandbox/-/postman-sandbox-4.7.1.tgz", "integrity": "sha512-H2wYSLK0mB588IaxoLrLoPbpmxsIcwFtgaK2c8gAsAQ+TgYFePwb4qdeVcYDMqmwrLd77/ViXkjasP/sBMz1sQ==", "dev": true, - "license": "Apache-2.0", + "license": "Apache-2.0", + "dependencies": { + "lodash": "4.17.21", + "postman-collection": "4.4.0", + "teleport-javascript": "1.0.0", + "uvm": "2.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/postman-sandbox/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", "dependencies": { - "lodash": "4.17.21", - "postman-collection": "4.4.0", - "teleport-javascript": "1.0.0", - "uvm": "2.1.1" + "safer-buffer": ">= 2.1.2 < 3.0.0" }, "engines": { - "node": ">=10" + "node": ">=0.10.0" } }, "node_modules/postman-sandbox/node_modules/postman-collection": { @@ -11060,83 +11546,6 @@ ], "license": "MIT" }, - "node_modules/read-package-up": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/read-package-up/-/read-package-up-11.0.0.tgz", - "integrity": "sha512-MbgfoNPANMdb4oRBNg5eqLbB2t2r+o5Ua1pNt8BqGp4I0FJZhuVSOj3PaBPni4azWuSzEdNn2evevzVmEk1ohQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "find-up-simple": "^1.0.0", - "read-pkg": "^9.0.0", - "type-fest": "^4.6.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-package-up/node_modules/type-fest": { - "version": "4.38.0", 
- "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.38.0.tgz", - "integrity": "sha512-2dBz5D5ycHIoliLYLi0Q2V7KRaDlH0uWIvmk7TYlAg5slqwiPv1ezJdZm1QEM0xgk29oYWMCbIG7E6gHpvChlg==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-9.0.1.tgz", - "integrity": "sha512-9viLL4/n1BJUCT1NXVTdS1jtm80yDEgR5T4yCelII49Mbj0v1rZdKqj7zCiYdbB0CuCgdrvHcNogAKTFPBocFA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/normalize-package-data": "^2.4.3", - "normalize-package-data": "^6.0.0", - "parse-json": "^8.0.0", - "type-fest": "^4.6.0", - "unicorn-magic": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg/node_modules/type-fest": { - "version": "4.38.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.38.0.tgz", - "integrity": "sha512-2dBz5D5ycHIoliLYLi0Q2V7KRaDlH0uWIvmk7TYlAg5slqwiPv1ezJdZm1QEM0xgk29oYWMCbIG7E6gHpvChlg==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg/node_modules/unicorn-magic": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", - "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/readable-stream": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", @@ -11239,6 +11648,97 @@ 
"node": ">=6" } }, + "node_modules/request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/request/node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, + "node_modules/request/node_modules/jsprim": { + "version": "1.4.2", + 
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/request/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "dev": true, + "license": "MIT", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -11304,67 +11804,14 @@ "node": ">=0.10.0" } }, - "node_modules/rimraf": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz", - "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": 
"^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/rollup": { - "version": "4.37.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.37.0.tgz", - "integrity": "sha512-iAtQy/L4QFU+rTJ1YUjXqJOJzuwEghqWzCEYD2FEghT7Gsy1VdABntrO4CLopA5IkflTyqNiLNwPcOJ3S7UKLg==", + "version": "4.44.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.44.1.tgz", + "integrity": "sha512-x8H8aPvD+xbl0Do8oez5f5o8eMS3trfCghc4HhLAnCkj7Vl0d1JWGs0UF/D886zLW2rOj2QymV/JcSSsw+XDNg==", "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -11374,36 +11821,29 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.37.0", - "@rollup/rollup-android-arm64": "4.37.0", - "@rollup/rollup-darwin-arm64": "4.37.0", - "@rollup/rollup-darwin-x64": "4.37.0", - "@rollup/rollup-freebsd-arm64": "4.37.0", - "@rollup/rollup-freebsd-x64": "4.37.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.37.0", - "@rollup/rollup-linux-arm-musleabihf": "4.37.0", - "@rollup/rollup-linux-arm64-gnu": "4.37.0", - "@rollup/rollup-linux-arm64-musl": "4.37.0", - "@rollup/rollup-linux-loongarch64-gnu": "4.37.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.37.0", - "@rollup/rollup-linux-riscv64-gnu": "4.37.0", - "@rollup/rollup-linux-riscv64-musl": "4.37.0", - 
"@rollup/rollup-linux-s390x-gnu": "4.37.0", - "@rollup/rollup-linux-x64-gnu": "4.37.0", - "@rollup/rollup-linux-x64-musl": "4.37.0", - "@rollup/rollup-win32-arm64-msvc": "4.37.0", - "@rollup/rollup-win32-ia32-msvc": "4.37.0", - "@rollup/rollup-win32-x64-msvc": "4.37.0", + "@rollup/rollup-android-arm-eabi": "4.44.1", + "@rollup/rollup-android-arm64": "4.44.1", + "@rollup/rollup-darwin-arm64": "4.44.1", + "@rollup/rollup-darwin-x64": "4.44.1", + "@rollup/rollup-freebsd-arm64": "4.44.1", + "@rollup/rollup-freebsd-x64": "4.44.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.44.1", + "@rollup/rollup-linux-arm-musleabihf": "4.44.1", + "@rollup/rollup-linux-arm64-gnu": "4.44.1", + "@rollup/rollup-linux-arm64-musl": "4.44.1", + "@rollup/rollup-linux-loongarch64-gnu": "4.44.1", + "@rollup/rollup-linux-powerpc64le-gnu": "4.44.1", + "@rollup/rollup-linux-riscv64-gnu": "4.44.1", + "@rollup/rollup-linux-riscv64-musl": "4.44.1", + "@rollup/rollup-linux-s390x-gnu": "4.44.1", + "@rollup/rollup-linux-x64-gnu": "4.44.1", + "@rollup/rollup-linux-x64-musl": "4.44.1", + "@rollup/rollup-win32-arm64-msvc": "4.44.1", + "@rollup/rollup-win32-ia32-msvc": "4.44.1", + "@rollup/rollup-win32-x64-msvc": "4.44.1", "fsevents": "~2.3.2" } }, - "node_modules/rollup/node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "dev": true, - "license": "MIT" - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -11529,9 +11969,9 @@ "license": "ISC" }, "node_modules/semver": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", - "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "version": "7.7.2", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, "license": "ISC", "bin": { @@ -11620,6 +12060,25 @@ "node": ">= 0.4" } }, + "node_modules/set-value": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-4.1.0.tgz", + "integrity": "sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw==", + "dev": true, + "funding": [ + "https://github.com/sponsors/jonschlinkert", + "https://paypal.me/jonathanschlinkert", + "https://jonschlinkert.dev/sponsor" + ], + "license": "MIT", + "dependencies": { + "is-plain-object": "^2.0.4", + "is-primitive": "^3.0.1" + }, + "engines": { + "node": ">=11.0" + } + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -11643,6 +12102,19 @@ "node": ">=8" } }, + "node_modules/shell-quote": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/shellwords": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/shellwords/-/shellwords-0.1.1.tgz", @@ -11760,15 +12232,15 @@ } }, "node_modules/simple-git": { - "version": "3.27.0", - "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.27.0.tgz", - "integrity": "sha512-ivHoFS9Yi9GY49ogc6/YAi3Fl9ROnF4VyubNylgCkA+RVqLaKWnDSzXOVzya8csELIaWaYNutsEuAhZrtOjozA==", + "version": "3.28.0", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.28.0.tgz", + "integrity": 
"sha512-Rs/vQRwsn1ILH1oBUy8NucJlXmnnLeLCfcvbSehkPzbv3wwoFWIdtfd6Ndo6ZPhlPsCZ60CPI4rxurnwAa+a2w==", "dev": true, "license": "MIT", "dependencies": { "@kwsites/file-exists": "^1.1.1", "@kwsites/promise-deferred": "^1.1.1", - "debug": "^4.3.5" + "debug": "^4.4.0" }, "funding": { "type": "github", @@ -11818,42 +12290,6 @@ "node": ">=0.10.0" } }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "dev": true, - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.21", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz", - "integrity": "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==", - "dev": true, - "license": "CC0-1.0" - }, "node_modules/sponge-case": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/sponge-case/-/sponge-case-2.0.3.tgz", @@ -11911,12 +12347,26 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.8.1", - "resolved": 
"https://registry.npmjs.org/std-env/-/std-env-3.8.1.tgz", - "integrity": "sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==", + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", "dev": true, "license": "MIT" }, + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/stream-length": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/stream-length/-/stream-length-1.0.2.tgz", @@ -12161,36 +12611,96 @@ "node": ">=18" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-indent": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", + "integrity": "sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-literal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" } }, - "node_modules/strip-indent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", - "integrity": "sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/superagent": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.6.tgz", + "integrity": "sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g==", + "deprecated": "Please upgrade to v9.0.0+ as we have fixed a public vulnerability with formidable dependency. Note that v9.0.0+ requires Node.js v14.18.0+. See https://github.com/ladjs/superagent/pull/1800 for insight. 
This project is supported and maintained by the team at Forward Email @ https://forwardemail.net", "dev": true, "license": "MIT", "dependencies": { - "min-indent": "^1.0.1" + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=6.4.0 <13 || >=14" } }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "node_modules/superagent/node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", "dev": true, - "license": "MIT", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, "engines": { - "node": ">=8" + "node": ">=0.6" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/supports-color": { @@ -12259,19 +12769,19 @@ "license": "ISC" }, "node_modules/temporal-polyfill": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.2.5.tgz", - "integrity": "sha512-ye47xp8Cb0nDguAhrrDS1JT1SzwEV9e26sSsrWzVu+yPZ7LzceEcH0i2gci9jWfOfSCCgM3Qv5nOYShVUUFUXA==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.3.0.tgz", + "integrity": 
"sha512-qNsTkX9K8hi+FHDfHmf22e/OGuXmfBm9RqNismxBrnSmZVJKegQ+HYYXT+R7Ha8F/YSm2Y34vmzD4cxMu2u95g==", "dev": true, "license": "MIT", "dependencies": { - "temporal-spec": "^0.2.4" + "temporal-spec": "0.3.0" } }, "node_modules/temporal-spec": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.2.4.tgz", - "integrity": "sha512-lDMFv4nKQrSjlkHKAlHVqKrBG4DyFfa9F74cmBZ3Iy3ed8yvWnlWSIdi4IKfSqwmazAohBNwiN64qGx4y5Q3IQ==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.3.0.tgz", + "integrity": "sha512-n+noVpIqz4hYgFSMOSiINNOUOMFtV5cZQNCmmszA6GiVFVRt3G7AqVyhXjhCSmowvQn+NsGn+jMDMKJYHd3bSQ==", "dev": true, "license": "ISC" }, @@ -12290,6 +12800,16 @@ "node": ">=18" } }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, "node_modules/test-exclude/node_modules/glob": { "version": "10.4.5", "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", @@ -12361,10 +12881,55 @@ "dev": true, "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": 
"sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tinypool": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", - "integrity": "sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", "dev": true, "license": "MIT", "engines": { @@ -12382,9 +12947,9 @@ } }, "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", "dev": true, "license": "MIT", "engines": { @@ -12434,6 +12999,20 @@ "dev": true, "license": "MIT" }, + "node_modules/tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": 
"sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", @@ -12442,9 +13021,9 @@ "license": "MIT" }, "node_modules/tree-dump": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tree-dump/-/tree-dump-1.0.2.tgz", - "integrity": "sha512-dpev9ABuLWdEubk+cIaI9cHwRNNDjkBBLXTwI4UCUFdQ5xXKqNXoK4FEciw/vxf+NQ7Cb7sGUyeUtORvHIdRXQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tree-dump/-/tree-dump-1.0.3.tgz", + "integrity": "sha512-il+Cv80yVHFBwokQSfd4bldvr1Md951DpgAGfmhydt04L+YzHgubm2tQ7zueWDcGENKHq0ZvGFR/hjvNXilHEg==", "dev": true, "license": "Apache-2.0", "engines": { @@ -12498,6 +13077,19 @@ "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, "node_modules/tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", @@ -12610,9 +13202,9 @@ } }, "node_modules/typescript": { - "version": "5.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", - "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, "license": 
"Apache-2.0", "bin": { @@ -12624,15 +13216,15 @@ } }, "node_modules/typescript-eslint": { - "version": "8.28.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.28.0.tgz", - "integrity": "sha512-jfZtxJoHm59bvoCMYCe2BM0/baMswRhMmYhy+w6VfcyHrjxZ0OJe0tGasydCpIpA+A/WIJhTyZfb3EtwNC/kHQ==", + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.35.0.tgz", + "integrity": "sha512-uEnz70b7kBz6eg/j0Czy6K5NivaYopgxRjsnAJ2Fx5oTLo3wefTHIbL7AkQr1+7tJCRVpTs/wiM8JR/11Loq9A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.28.0", - "@typescript-eslint/parser": "8.28.0", - "@typescript-eslint/utils": "8.28.0" + "@typescript-eslint/eslint-plugin": "8.35.0", + "@typescript-eslint/parser": "8.35.0", + "@typescript-eslint/utils": "8.35.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -12687,9 +13279,9 @@ "license": "MIT" }, "node_modules/undici-types": { - "version": "5.28.4", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.28.4.tgz", - "integrity": "sha512-3OeMF5Lyowe8VW0skf5qaIE7Or3yS9LS7fvMUI0gg4YxpIBVg0L8BxCmROw2CcYhSkpR68Epz7CGc8MPj94Uww==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "dev": true, "license": "MIT" }, @@ -12825,28 +13417,17 @@ "node": ">=10" } }, - "node_modules/v8-compile-cache-lib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", - "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "dev": true, - "license": "MIT" - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": 
"https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "node_modules/uvm/node_modules/flatted": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.6.tgz", + "integrity": "sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==", "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } + "license": "ISC" }, "node_modules/validator": { - "version": "13.15.0", - "resolved": "https://registry.npmjs.org/validator/-/validator-13.15.0.tgz", - "integrity": "sha512-36B2ryl4+oL5QxZ3AzD0t5SsMNGvTtQHpjgFO5tbNxfXbMFkY822ktCDe1MnlqV3301QQI9SLHDNJokDI+Z9pA==", + "version": "13.15.15", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.15.15.tgz", + "integrity": "sha512-BgWVbCI72aIQy937xbawcs+hrVaN/CZ2UwutgaJ36hGqRrLNM+f5LUT/YPRbo8IV/ASeFzXszezV+y2+rq3l8A==", "dev": true, "license": "MIT", "engines": { @@ -12868,66 +13449,25 @@ "extsprintf": "^1.2.0" } }, - "node_modules/vite-node": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.9.tgz", - "integrity": "sha512-w3Gdx7jDcuT9cNn9jExXgOyKmf5UOTb6WMHz8LGAm54eS1Elf5OuBhCxl6zJxGhEeIkgsE1WbHuoL0mj/UXqXg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.4.0", - "es-module-lexer": "^1.6.0", - "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, - "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/vite-node/node_modules/@types/node": { - "version": "22.13.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.13.tgz", - "integrity": 
"sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "undici-types": "~6.20.0" - } - }, - "node_modules/vite-node/node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true - }, - "node_modules/vite-node/node_modules/vite": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz", - "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==", + "node_modules/vite": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.0.0.tgz", + "integrity": "sha512-ixXJB1YRgDIw2OszKQS9WxGHKwLdCsbQNkpJN171udl6szi/rIySHL6/Os3s2+oE4P/FLD4dxg4mD7Wust+u5g==", "dev": true, "license": "MIT", "dependencies": { "esbuild": "^0.25.0", - "postcss": "^8.5.3", - "rollup": "^4.30.1" + "fdir": "^6.4.6", + "picomatch": "^4.0.2", + "postcss": "^8.5.6", + "rollup": "^4.40.0", + "tinyglobby": "^0.2.14" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -12936,14 +13476,14 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", - "less": "*", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" @@ -12984,6 +13524,143 @@ } } 
}, + "node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vite/node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + 
"debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/vscode-jsonrpc": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-3.6.2.tgz", @@ -13265,9 +13942,9 @@ "license": "MIT" }, "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "version": "6.2.0", + "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, "license": "MIT", "dependencies": { @@ -13276,10 +13953,7 @@ "strip-ansi": "^6.0.0" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + "node": ">=8" } }, "node_modules/wrap-ansi-cjs": { @@ -13419,16 +14093,16 @@ } }, "node_modules/yaml": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", - "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.0.tgz", + "integrity": "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==", "dev": true, "license": "ISC", "bin": { "yaml": "bin.mjs" }, "engines": { - "node": ">= 14" + "node": ">= 14.6" } }, "node_modules/yargs": { @@ -13460,16 +14134,6 @@ "node": ">=12" } }, - "node_modules/yn": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", @@ -13542,9 +14206,9 @@ } }, "node_modules/zod": { - "version": "3.24.2", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz", - "integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==", + "version": "3.25.67", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.67.tgz", + "integrity": "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw==", "dev": true, "license": 
"MIT", "funding": { @@ -13552,16 +14216,16 @@ } }, "node_modules/zod-validation-error": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-3.4.0.tgz", - "integrity": "sha512-ZOPR9SVY6Pb2qqO5XHt+MkkTRxGXb4EVtnjc9JpXUOtUB1T9Ru7mZOT361AN3MsetVe7R0a1KZshJDZdgp9miQ==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-3.5.2.tgz", + "integrity": "sha512-mdi7YOLtram5dzJ5aDtm1AG9+mxRma1iaMrZdYIpFO7epdKBUwLHIxTF8CPDeCQ828zAXYtizrKlEJAtzgfgrw==", "dev": true, "license": "MIT", "engines": { "node": ">=18.0.0" }, "peerDependencies": { - "zod": "^3.18.0" + "zod": "^3.25.0" } } } diff --git a/package.json b/package.json index 521030df86f6..cca2f76ea8f5 100644 --- a/package.json +++ b/package.json @@ -1,34 +1,39 @@ { "name": "azure-rest-api-specs", "devDependencies": { - "@azure-tools/spec-gen-sdk": "^0.3.2", - "@azure-tools/typespec-apiview": "0.6.0", - "@azure-tools/typespec-autorest": "0.53.0", - "@azure-tools/typespec-azure-core": "0.53.0", - "@azure-tools/typespec-azure-portal-core": "0.53.0", - "@azure-tools/typespec-azure-resource-manager": "0.53.0", - "@azure-tools/typespec-azure-rulesets": "0.53.0", - "@azure-tools/typespec-client-generator-cli": "0.16.0", - "@azure-tools/typespec-client-generator-core": "0.53.1", + "@azure-tools/spec-gen-sdk": "~0.8.0", + "@azure-tools/specs-shared": "file:.github/shared", + "@azure-tools/typespec-apiview": "0.7.2", + "@azure-tools/typespec-autorest": "0.57.1", + "@azure-tools/typespec-azure-core": "0.57.0", + "@azure-tools/typespec-azure-portal-core": "0.57.0", + "@azure-tools/typespec-azure-resource-manager": "0.57.2", + "@azure-tools/typespec-azure-rulesets": "0.57.1", + "@azure-tools/typespec-client-generator-cli": "0.23.0", + "@azure-tools/typespec-client-generator-core": "0.57.3", "@azure-tools/typespec-liftr-base": "0.8.0", - "@autorest/openapi-to-typespec": "0.10.13", + "@autorest/openapi-to-typespec": "0.11.2", 
"@azure/avocado": "^0.9.1", - "@typespec/compiler": "0.67.2", - "@typespec/http": "0.67.1", - "@typespec/sse": "0.67.1", - "@typespec/events": "0.67.1", - "@typespec/openapi": "0.67.1", - "@typespec/openapi3": "0.67.1", - "@typespec/prettier-plugin-typespec": "0.67.1", - "@typespec/rest": "0.67.1", - "@typespec/streams": "0.67.1", - "@typespec/versioning": "0.67.1", - "@typespec/xml": "0.67.1", + "@typespec/compiler": "1.1.0", + "@typespec/http": "1.1.0", + "@typespec/sse": "0.71.0", + "@typespec/events": "0.71.0", + "@typespec/openapi": "1.1.0", + "@typespec/openapi3": "1.1.0", + "@typespec/prettier-plugin-typespec": "1.1.0", + "@typespec/rest": "0.71.0", + "@typespec/streams": "0.71.0", + "@typespec/versioning": "0.71.0", + "@typespec/xml": "0.71.0", "azure-rest-api-specs-eng-tools": "file:eng/tools", - "oav": "^3.5.1", + "oav": "^3.6.1", "prettier": "~3.5.3", "typescript": "~5.8.2" }, + "overrides": { + "@typespec/asset-emitter": "0.71.0", + "jsonpath-plus": "^10.3.0" + }, "engines": { "node": ">=20.0.0", "npm": ">=10.0.0" diff --git a/specification/terraform/resource-manager/readme.go.md b/specification/terraform/resource-manager/readme.go.md deleted file mode 100644 index 33ea79404bdd..000000000000 --- a/specification/terraform/resource-manager/readme.go.md +++ /dev/null @@ -1,11 +0,0 @@ -## Go - -These settings apply only when `--go` is specified on the command line. - -```yaml $(go) && $(track2) -azure-arm: true -license-header: MICROSOFT_MIT_NO_VERSION -module-name: sdk/resourcemanager/terraform/armterraform -module: github.com/Azure/azure-sdk-for-go/$(module-name) -output-folder: $(go-sdk-folder)/$(module-name) -```