diff --git a/.azure/pipelines/ci-public.yml b/.azure/pipelines/ci-public.yml index 9bcb4699e93a..3d823e234dc6 100644 --- a/.azure/pipelines/ci-public.yml +++ b/.azure/pipelines/ci-public.yml @@ -446,7 +446,7 @@ stages: jobName: Linux_musl_x64_build jobDisplayName: "Build: Linux Musl x64" agentOs: Linux - container: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-WithNode + container: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-build-amd64 buildArgs: --arch x64 --os-name linux-musl @@ -480,7 +480,7 @@ stages: jobDisplayName: "Build: Linux Musl ARM" agentOs: Linux useHostedUbuntu: false - container: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm-alpine + container: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-build-amd64 buildArgs: --arch arm --os-name linux-musl @@ -513,7 +513,7 @@ stages: jobDisplayName: "Build: Linux Musl ARM64" agentOs: Linux useHostedUbuntu: false - container: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64-alpine + container: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-build-amd64 buildArgs: --arch arm64 --os-name linux-musl @@ -645,7 +645,7 @@ stages: parameters: platform: name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-build-amd64' buildScript: './eng/build.sh --publish --no-build-repo-tasks $(_PublishArgs) $(_InternalRuntimeDownloadArgs)' skipPublishValidation: true jobProperties: diff --git a/.azure/pipelines/ci.yml b/.azure/pipelines/ci.yml index d9f2afd0e9b6..08eab9052b12 100644 --- a/.azure/pipelines/ci.yml +++ b/.azure/pipelines/ci.yml @@ -97,14 +97,14 @@ variables: - name: WindowsArm64InstallersLogArgs value: /bl:artifacts/log/Release/Build.Installers.Arm64.binlog - name: _InternalRuntimeDownloadArgs - value: -RuntimeSourceFeed https://dotnetbuilds.blob.core.windows.net/internal + value: -RuntimeSourceFeed https://ci.dot.net/internal -RuntimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) /p:DotNetAssetRootAccessTokenSuffix='$(dotnetbuilds-internal-container-read-token-base64)' # The code signing doesn't use the aspnet build scripts, so the msbuild parameters have to be passed directly. This # is awkward but necessary because the eng/common/ build scripts don't add the msbuild properties automatically. 
- name: _InternalRuntimeDownloadCodeSignArgs value: $(_InternalRuntimeDownloadArgs) - /p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal + /p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) - group: DotNet-HelixApi-Access - ${{ if notin(variables['Build.Reason'], 'PullRequest') }}: @@ -149,12 +149,8 @@ extends: tsa: enabled: true containers: - alpine319WithNode: - image: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-WithNode - mariner20CrossArmAlpine: - image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm-alpine - mariner20CrossArm64Alpine: - image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64-alpine + azureLinux30Net9BuildAmd64: + image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-build-amd64 stages: - stage: build displayName: Build @@ -515,7 +511,7 @@ extends: jobName: Linux_musl_x64_build jobDisplayName: "Build: Linux Musl x64" agentOs: Linux - container: alpine319WithNode + container: azureLinux30Net9BuildAmd64 buildArgs: --arch x64 --os-name linux-musl @@ -549,7 +545,7 @@ extends: jobDisplayName: "Build: Linux Musl ARM" agentOs: Linux useHostedUbuntu: false - container: mariner20CrossArmAlpine + container: azureLinux30Net9BuildAmd64 buildArgs: --arch arm --os-name linux-musl @@ -582,7 +578,7 @@ extends: jobDisplayName: "Build: Linux Musl ARM64" agentOs: Linux useHostedUbuntu: false - container: mariner20CrossArm64Alpine + container: azureLinux30Net9BuildAmd64 buildArgs: --arch arm64 --os-name linux-musl diff --git a/.azure/pipelines/identitymodel-helix-matrix.yml b/.azure/pipelines/identitymodel-helix-matrix.yml index 9159fe350b7e..48bba5e63274 100644 --- a/.azure/pipelines/identitymodel-helix-matrix.yml +++ b/.azure/pipelines/identitymodel-helix-matrix.yml @@ -26,7 +26,7 @@ resources: ref: refs/tags/release extends: - template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines + template: v1/1ES.Unofficial.PipelineTemplate.yml@1esPipelines parameters: sdl: sourceAnalysisPool: diff --git a/.azure/pipelines/jobs/default-build.yml b/.azure/pipelines/jobs/default-build.yml index 65307d5d0ad7..15cf2272358c 100644 --- a/.azure/pipelines/jobs/default-build.yml +++ b/.azure/pipelines/jobs/default-build.yml @@ -109,10 +109,10 @@ jobs: vmImage: macOS-13 ${{ if eq(parameters.agentOs, 'Linux') }}: ${{ if eq(parameters.useHostedUbuntu, true) }}: - vmImage: ubuntu-20.04 + vmImage: ubuntu-22.04 ${{ else }}: name: $(DncEngPublicBuildPool) - demands: ImageOverride -equals Build.Ubuntu.2004.Amd64.Open + demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open ${{ if eq(parameters.agentOs, 'Windows') }}: name: $(DncEngPublicBuildPool) demands: ImageOverride -equals windows.vs2022preview.amd64.open @@ -327,7 +327,7 @@ jobs: os: macOS ${{ if eq(parameters.agentOs, 'Linux') }}: name: $(DncEngInternalBuildPool) - image: 1es-ubuntu-2004 + image: 1es-ubuntu-2204 os: linux ${{ if eq(parameters.agentOs, 'Windows') }}: name: $(DncEngInternalBuildPool) diff --git a/NuGet.config b/NuGet.config index 9fa908668f8f..cabc4d8508a0 100644 --- a/NuGet.config +++ b/NuGet.config @@ -4,10 +4,10 @@ - + - + @@ -30,10 +30,10 @@ - + - + diff --git a/eng/Baseline.Designer.props b/eng/Baseline.Designer.props index f566358b9acb..c028a1e628ea 100644 --- a/eng/Baseline.Designer.props +++ b/eng/Baseline.Designer.props @@ -2,117 +2,117 @@ $(MSBuildAllProjects);$(MSBuildThisFileFullPath) - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 
9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - - + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 @@ -120,279 +120,279 @@ - 9.0.0 + 9.0.9 - - - + + + - - - + + + - - - + + + - 9.0.0 + 9.0.9 - - - + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - - - - - + + + + + - 9.0.0 + 9.0.9 - - - - - + + + + + - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - - - - - + + + + + + - 9.0.0 + 9.0.9 - - - + + + - 9.0.0 + 9.0.9 - - - + + + - + - - - + + + - - - + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - + - + - 9.0.0 + 9.0.9 - - - - - - + + + + + + - + - - - - - - - + + + + + + + - - - - - - + + + + + + - + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - - + + - - + + - - + + - 9.0.0 + 9.0.9 - + - + - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 @@ -401,83 +401,83 @@ - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - - - + + + - + - - - - + + + + - - - - + + + + - - - - + + + + - 9.0.0 + 9.0.9 - - + + - + - - + + - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 @@ -493,510 +493,510 @@ - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - - - + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - + + - - + + - - + + - 9.0.0 + 9.0.9 - - - - - - + + + + + + - - - - - + + + + + - - - - - - + + + + + + - - - - - - + + + + + + - 9.0.0 + 9.0.9 - - + + - + - - + + - - - + + + - 9.0.0 + 9.0.9 - + - + - + - 9.0.0 + 9.0.9 - + - + - + - 9.0.0 + 9.0.9 - + - + - + - 9.0.0 + 9.0.9 - - - - + + + + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - + + - - + + - - + + - 9.0.0 + 9.0.9 - - - + + + - - - + + + - - - + + + - - - + + + - 9.0.0 + 9.0.9 - - + + - - + + - - + + - 9.0.0 + 9.0.9 - - - - - + + + + + - - - - + + + + - - - - - + + + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - - + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - + - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - - - + + + - - - + + + - - - + + + - 9.0.0 + 9.0.9 - - - + + + - - - + + + - - - + + + - 9.0.0 + 9.0.9 - - - - + + + + - - - - + + + + - - - - + + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - - - - + + + + + - - - - - + + + + + - - - - - + + + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - - - + + + - - + + - - - + + + - 9.0.0 + 9.0.9 - 9.0.0 + 9.0.9 - + - 9.0.0 + 9.0.9 - + \ No newline at end of file diff --git a/eng/Baseline.xml b/eng/Baseline.xml index 1b4ce55e8e33..3ebda983fc82 100644 --- a/eng/Baseline.xml +++ b/eng/Baseline.xml @@ -4,110 +4,110 @@ This file contains a list of all the packages and their versions which were rele Update this list when preparing for a new patch. 
--> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/eng/SourceBuildPrebuiltBaseline.xml b/eng/SourceBuildPrebuiltBaseline.xml index 8e86375464ef..2c8d20d3234b 100644 --- a/eng/SourceBuildPrebuiltBaseline.xml +++ b/eng/SourceBuildPrebuiltBaseline.xml @@ -46,6 +46,13 @@ + + + + + + + diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index b87535c6b679..72b4a55be806 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -9,334 +9,334 @@ --> - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-efcore - 480480b57cd6e43fe5cab1b552ac0ef917bf3fe8 + 5452ff90a79084afd23df379388ae8bca24284f3 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + 
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + 
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + 
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://github.com/dotnet/xdt - 1a54480f52703fb45fac2a6b955247d33758383e + 63ae81154c50a1cf9287cc47d8351d55b4289e6d - + https://github.com/dotnet/xdt - 1a54480f52703fb45fac2a6b955247d33758383e + 63ae81154c50a1cf9287cc47d8351d55b4289e6d @@ -367,60 +367,60 @@ bc1c3011064a493b0ca527df6fb7215e2e5cfa96 - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 - + https://github.com/dotnet/source-build-externals - c65b1c1affed1f4847f9c3f81623dfa929d21e1a + ab469606a3e6b026dcac301e2dab96117c94faeb - + https://dev.azure.com/dnceng/internal/_git/dotnet-runtime - c8acea22626efab11c13778c028975acdc34678f + e1f19886fe3354963a4a790c896b3f99689fd7a5 https://github.com/dotnet/winforms 9b822fd70005bf5632d12fe76811b97b3dd044e4 - + https://github.com/dotnet/arcade - b41381d5cd633471265e9cd72e933a7048e03062 + 43df065432cbc74c0fa44d7569a0c31f64883f03 - + https://github.com/dotnet/arcade - b41381d5cd633471265e9cd72e933a7048e03062 + 43df065432cbc74c0fa44d7569a0c31f64883f03 - + https://github.com/dotnet/arcade - b41381d5cd633471265e9cd72e933a7048e03062 + 43df065432cbc74c0fa44d7569a0c31f64883f03 - + https://github.com/dotnet/arcade - b41381d5cd633471265e9cd72e933a7048e03062 + 43df065432cbc74c0fa44d7569a0c31f64883f03 - + https://github.com/dotnet/arcade - b41381d5cd633471265e9cd72e933a7048e03062 + 43df065432cbc74c0fa44d7569a0c31f64883f03 - + https://github.com/dotnet/arcade - b41381d5cd633471265e9cd72e933a7048e03062 + 43df065432cbc74c0fa44d7569a0c31f64883f03 - + https://github.com/dotnet/extensions - 
cfed375f3161f2e553e946b4f968b818e8e858f1 + 3645ccc33d29294341654cc37d0c448ed47c836c - + https://github.com/dotnet/extensions - cfed375f3161f2e553e946b4f968b818e8e858f1 + 3645ccc33d29294341654cc37d0c448ed47c836c https://github.com/nuget/nuget.client diff --git a/eng/Versions.props b/eng/Versions.props index 3d2831de877a..910f240ecf2a 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -8,7 +8,7 @@ 9 0 - 1 + 10 true @@ -19,8 +19,8 @@ --> true release - rtm - RTM + servicing + Servicing true false $(AspNetCoreMajorVersion).$(AspNetCoreMinorVersion) @@ -68,92 +68,92 @@ --> - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1-servicing.24610.10 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1-servicing.24610.10 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1-servicing.24610.10 - 9.0.1-servicing.24610.10 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10-servicing.25475.15 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10-servicing.25475.15 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10-servicing.25475.15 + 9.0.10-servicing.25475.15 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 - 9.0.1-servicing.24610.10 - 9.0.1 + 9.0.10-servicing.25475.15 + 9.0.10 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 - 9.1.0-preview.1.24575.1 - 9.1.0-preview.1.24575.1 + 9.10.0-preview.1.25456.3 + 9.10.0-preview.1.25456.3 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 - 9.0.1 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 + 9.0.10 4.11.0-3.24554.2 4.11.0-3.24554.2 @@ -166,12 +166,12 @@ 6.2.4 6.2.4 - 9.0.0-beta.24572.2 - 9.0.0-beta.24572.2 - 9.0.0-beta.24572.2 - 9.0.0-beta.24572.2 + 9.0.0-beta.25460.1 + 9.0.0-beta.25460.1 + 9.0.0-beta.25460.1 + 9.0.0-beta.25460.1 - 9.0.0-alpha.1.24568.2 + 9.0.0-alpha.1.24575.1 9.0.0-alpha.1.24413.1 @@ -179,8 +179,8 @@ 9.0.0-rtm.24512.2 - 9.0.0-preview.24522.2 - 9.0.0-preview.24522.2 + 10.0.0-preview.24609.2 + 10.0.0-preview.24609.2 1.1.1 - 17.4.0 + 17.8.29 1.2.0 - 17.4.0 - 17.4.0 - 17.4.0 + 17.8.29 + 17.8.29 + 17.8.29 1.2.6 - 17.4.0 diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 index 90b58e32a87b..a261517ef906 100644 --- a/eng/common/post-build/publish-using-darc.ps1 +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -5,7 +5,8 @@ param( [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, - [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters + [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters, + [Parameter(Mandatory=$false)][string] $RequireDefaultChannels ) try { @@ -33,6 +34,10 
@@ try { if ("false" -eq $WaitPublishingFinish) { $optionalParams.Add("--no-wait") | Out-Null } + + if ("true" -eq $RequireDefaultChannels) { + $optionalParams.Add("--default-channels-required") | Out-Null + } & $darc add-build-to-channel ` --id $buildId ` diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md index 5ef6c30ba924..4bf4cf41bd7c 100644 --- a/eng/common/template-guidance.md +++ b/eng/common/template-guidance.md @@ -50,14 +50,14 @@ extends: - task: CopyFiles@2 displayName: Gather build output inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' + SourceFolder: '$(System.DefaultWorkingDirectory)/artifacts/marvel' Contents: '**' TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' ``` Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`). -# Development notes +## Development notes **Folder / file structure** diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml index 605692d2fb77..81ea7a261f2d 100644 --- a/eng/common/templates-official/job/job.yml +++ b/eng/common/templates-official/job/job.yml @@ -3,7 +3,7 @@ parameters: enableSbom: true runAsPublic: false PackageVersion: 9.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' jobs: - template: /eng/common/core-templates/job/job.yml @@ -16,6 +16,7 @@ jobs: parameters: PackageVersion: ${{ parameters.packageVersion }} BuildDropPath: ${{ parameters.buildDropPath }} + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom publishArtifacts: false # publish artifacts diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml index dbdd66d4a4b3..f1311bbb1b33 100644 --- a/eng/common/templates-official/variables/sdl-variables.yml +++ b/eng/common/templates-official/variables/sdl-variables.yml @@ -4,4 +4,4 @@ variables: - name: DefaultGuardianVersion value: 0.109.0 - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file + value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index d1aeb92fcea5..5bdd3dd85fd2 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -6,7 +6,7 @@ parameters: enableSbom: true runAsPublic: false PackageVersion: 9.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' jobs: - template: /eng/common/core-templates/job/job.yml @@ -75,7 +75,7 @@ jobs: parameters: is1ESPipeline: false args: - targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration' artifactName: 'BuildConfiguration' displayName: 'Publish build retry configuration' continueOnError: true diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index aa94fb174596..9b3ad8840fdb 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -42,7 +42,7 @@ [bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true } # Enable repos to use a particular version of the on-line dotnet-install scripts. 
-# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1 +# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1 [string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } # True to use global NuGet cache instead of restoring packages to repository-local directory. @@ -262,7 +262,7 @@ function GetDotNetInstallScript([string] $dotnetRoot) { if (!(Test-Path $installScript)) { Create-Directory $dotnetRoot $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" + $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" Retry({ Write-Host "GET $uri" @@ -320,7 +320,7 @@ function InstallDotNet([string] $dotnetRoot, $variations += @($installParameters) $dotnetBuilds = $installParameters.Clone() - $dotnetbuilds.AzureFeed = "https://dotnetbuilds.azureedge.net/public" + $dotnetbuilds.AzureFeed = "https://ci.dot.net/public" $variations += @($dotnetBuilds) if ($runtimeSourceFeed) { @@ -416,7 +416,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # Locate Visual Studio installation or download x-copy msbuild. $vsInfo = LocateVisualStudio $vsRequirements - if ($vsInfo -ne $null) { + if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) { # Ensure vsInstallDir has a trailing slash $vsInstallDir = Join-Path $vsInfo.installationPath "\" $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] diff --git a/eng/common/tools.sh b/eng/common/tools.sh index 00473c9f918d..01b09b65796c 100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -54,7 +54,7 @@ warn_as_error=${warn_as_error:-true} use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} # Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh +# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} # True to use global NuGet cache instead of restoring packages to repository-local directory. @@ -232,7 +232,7 @@ function InstallDotNet { local public_location=("${installParameters[@]}") variations+=(public_location) - local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://dotnetbuilds.azureedge.net/public") + local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public") variations+=(dotnetbuilds) if [[ -n "${6:-}" ]]; then @@ -295,7 +295,7 @@ function with_retries { function GetDotNetInstallScript { local root=$1 local install_script="$root/dotnet-install.sh" - local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" + local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" if [[ ! 
-a "$install_script" ]]; then mkdir -p "$root" diff --git a/eng/helix/helix.proj b/eng/helix/helix.proj index f31e201d516e..a772993a3592 100644 --- a/eng/helix/helix.proj +++ b/eng/helix/helix.proj @@ -58,12 +58,12 @@ runtime - $([System.Environment]::GetEnvironmentVariable('DotNetBuildsInternalReadSasToken')) - $([System.Environment]::GetEnvironmentVariable('DotNetBuildsInternalReadSasToken')) diff --git a/eng/scripts/InstallJdk.ps1 b/eng/scripts/InstallJdk.ps1 index 0872f241a982..1ba711b5eaa4 100644 --- a/eng/scripts/InstallJdk.ps1 +++ b/eng/scripts/InstallJdk.ps1 @@ -22,8 +22,7 @@ $installDir = "$repoRoot\.tools\jdk\win-x64\" $javacExe = "$installDir\bin\javac.exe" $tempDir = "$repoRoot\obj" if (-not $JdkVersion) { - $globalJson = Get-Content "$repoRoot\global.json" | ConvertFrom-Json - $JdkVersion = $globalJson.'native-tools'.jdk + $JdkVersion = "11.0.24" } if (Test-Path $javacExe) { diff --git a/eng/scripts/install-nginx-linux.sh b/eng/scripts/install-nginx-linux.sh index bbfb79c48203..f075a899d1cf 100755 --- a/eng/scripts/install-nginx-linux.sh +++ b/eng/scripts/install-nginx-linux.sh @@ -6,7 +6,7 @@ scriptroot="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" reporoot="$(dirname "$(dirname "$scriptroot")")" nginxinstall="$reporoot/.tools/nginx" -curl -sSL http://nginx.org/download/nginx-1.14.2.tar.gz --retry 5 | tar zxfv - -C /tmp && cd /tmp/nginx-1.14.2/ +curl -sSL http://nginx.org/download/nginx-1.26.3.tar.gz --retry 5 | tar zxfv - -C /tmp && cd /tmp/nginx-1.26.3/ ./configure --prefix=$nginxinstall --with-http_ssl_module --without-http_rewrite_module make make install diff --git a/eng/targets/Helix.Common.props b/eng/targets/Helix.Common.props index cedd59cd34ad..1ba8ba99dbe3 100644 --- a/eng/targets/Helix.Common.props +++ b/eng/targets/Helix.Common.props @@ -2,9 +2,9 @@ (AlmaLinux.8.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:almalinux-8-helix-amd64 - (Alpine.318.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.18-helix-amd64 + (Alpine.321.Amd64.Open)azurelinux.3.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.21-helix-amd64 (Debian.12.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-12-helix-amd64 - (Fedora.38.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-38-helix + (Fedora.41.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-41-helix (Mariner)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-helix-amd64 (Debian.12.Arm64.Open)ubuntu.2204.armarch.open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-12-helix-arm64v8 @@ -29,7 +29,7 @@ - + @@ -42,14 +42,14 @@ - + - + - + diff --git a/eng/targets/Helix.targets b/eng/targets/Helix.targets index 43bf0cb7f7f1..0aab28ef20cc 100644 --- a/eng/targets/Helix.targets +++ b/eng/targets/Helix.targets @@ -17,9 +17,9 @@ $(HelixQueueAlmaLinux8); - $(HelixQueueAlpine318); + $(HelixQueueAlpine); $(HelixQueueDebian12); - $(HelixQueueFedora38); + $(HelixQueueFedora40); $(HelixQueueMariner); Ubuntu.2004.Amd64.Open; diff --git a/global.json b/global.json index e4674ed713f8..25bf23883f5d 100644 --- a/global.json +++ b/global.json @@ -1,9 +1,9 @@ { "sdk": { - "version": "9.0.100" + "version": "9.0.110" }, "tools": { - "dotnet": "9.0.100", + "dotnet": "9.0.110", "runtimes": { "dotnet/x86": [ "$(MicrosoftNETCoreBrowserDebugHostTransportVersion)" @@ -24,10 +24,10 @@ "xcopy-msbuild": "17.1.0" }, "native-tools": { - "jdk": 
"11.0.24" + "jdk": "latest" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24572.2", - "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24572.2" + "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.25460.1", + "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.25460.1" } } diff --git a/src/Caching/StackExchangeRedis/src/RedisCache.cs b/src/Caching/StackExchangeRedis/src/RedisCache.cs index debec0237040..95bba7dd0088 100644 --- a/src/Caching/StackExchangeRedis/src/RedisCache.cs +++ b/src/Caching/StackExchangeRedis/src/RedisCache.cs @@ -53,6 +53,8 @@ private static RedisValue[] GetHashFields(bool getData) => getData private long _firstErrorTimeTicks; private long _previousErrorTimeTicks; + internal virtual bool IsHybridCacheActive() => false; + // StackExchange.Redis will also be trying to reconnect internally, // so limit how often we recreate the ConnectionMultiplexer instance // in an attempt to reconnect @@ -375,6 +377,11 @@ private void TryAddSuffix(IConnectionMultiplexer connection) { connection.AddLibraryNameSuffix("aspnet"); connection.AddLibraryNameSuffix("DC"); + + if (IsHybridCacheActive()) + { + connection.AddLibraryNameSuffix("HC"); + } } catch (Exception ex) { diff --git a/src/Caching/StackExchangeRedis/src/RedisCacheImpl.cs b/src/Caching/StackExchangeRedis/src/RedisCacheImpl.cs index dab5bfc8655b..0a58d43e136d 100644 --- a/src/Caching/StackExchangeRedis/src/RedisCacheImpl.cs +++ b/src/Caching/StackExchangeRedis/src/RedisCacheImpl.cs @@ -1,6 +1,9 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System; +using Microsoft.Extensions.Caching.Hybrid; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; @@ -8,13 +11,20 @@ namespace Microsoft.Extensions.Caching.StackExchangeRedis; internal sealed class RedisCacheImpl : RedisCache { - public RedisCacheImpl(IOptions optionsAccessor, ILogger logger) + private readonly IServiceProvider _services; + + internal override bool IsHybridCacheActive() + => _services.GetService() is not null; + + public RedisCacheImpl(IOptions optionsAccessor, ILogger logger, IServiceProvider services) : base(optionsAccessor, logger) { + _services = services; // important: do not check for HybridCache here due to dependency - creates a cycle } - public RedisCacheImpl(IOptions optionsAccessor) + public RedisCacheImpl(IOptions optionsAccessor, IServiceProvider services) : base(optionsAccessor) { + _services = services; // important: do not check for HybridCache here due to dependency - creates a cycle } } diff --git a/src/Caching/StackExchangeRedis/test/CacheServiceExtensionsTests.cs b/src/Caching/StackExchangeRedis/test/CacheServiceExtensionsTests.cs index 29a49a7cec70..71e31d19928a 100644 --- a/src/Caching/StackExchangeRedis/test/CacheServiceExtensionsTests.cs +++ b/src/Caching/StackExchangeRedis/test/CacheServiceExtensionsTests.cs @@ -1,11 +1,19 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
+using System; +using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.Caching.Distributed; +using Microsoft.Extensions.Caching.Hybrid; +using Microsoft.Extensions.Caching.Memory; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; using Moq; using Xunit; @@ -121,4 +129,71 @@ public void AddStackExchangeRedisCache_UsesLoggerFactoryAlreadyRegisteredWithSer loggerFactory.Verify(); } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void AddStackExchangeRedisCache_HybridCacheDetected(bool hybridCacheActive) + { + // Arrange + var services = new ServiceCollection(); + + services.AddLogging(); + + // Act + services.AddStackExchangeRedisCache(options => { }); + if (hybridCacheActive) + { + services.AddMemoryCache(); + services.TryAddSingleton(); + } + + using var provider = services.BuildServiceProvider(); + var cache = Assert.IsAssignableFrom(provider.GetRequiredService()); + Assert.Equal(hybridCacheActive, cache.IsHybridCacheActive()); + } + + sealed class DummyHybridCache : HybridCache + { + // emulate the layout from HybridCache in dotnet/extensions + public DummyHybridCache(IOptions options, IServiceProvider services) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + var l1 = services.GetRequiredService(); + _ = options.Value; + var logger = services.GetService()?.CreateLogger(typeof(HybridCache)) ?? NullLogger.Instance; + // var clock = services.GetService() ?? TimeProvider.System; + var l2 = services.GetService(); // note optional + + // ignore L2 if it is really just the same L1, wrapped + // (note not just an "is" test; if someone has a custom subclass, who knows what it does?) 
+ if (l2 is not null + && l2.GetType() == typeof(MemoryDistributedCache) + && l1.GetType() == typeof(MemoryCache)) + { + l2 = null; + } + + IHybridCacheSerializerFactory[] factories = services.GetServices().ToArray(); + Array.Reverse(factories); + } + + public class HybridCacheOptions { } + + public override ValueTask GetOrCreateAsync(string key, TState state, Func> factory, HybridCacheEntryOptions options = null, IEnumerable tags = null, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public override ValueTask RemoveAsync(string key, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public override ValueTask RemoveByTagAsync(string tag, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public override ValueTask SetAsync(string key, T value, HybridCacheEntryOptions options = null, IEnumerable tags = null, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + } } diff --git a/src/Components/CustomElements/src/Microsoft.AspNetCore.Components.CustomElements.csproj b/src/Components/CustomElements/src/Microsoft.AspNetCore.Components.CustomElements.csproj index 4c5ae75fa8a4..5d27de7c059f 100644 --- a/src/Components/CustomElements/src/Microsoft.AspNetCore.Components.CustomElements.csproj +++ b/src/Components/CustomElements/src/Microsoft.AspNetCore.Components.CustomElements.csproj @@ -1,4 +1,4 @@ - + $(DefaultNetCoreTargetFramework) @@ -18,7 +18,7 @@ - + <_JsBuildOutput Include="$(InteropWorkingDir)dist\$(Configuration)\**" Exclude="$(InteropWorkingDir)dist\.gitignore" /> diff --git a/src/Components/WebAssembly/DevServer/src/Server/Startup.cs b/src/Components/WebAssembly/DevServer/src/Server/Startup.cs index 046031a29f79..a9870bf688fb 100644 --- a/src/Components/WebAssembly/DevServer/src/Server/Startup.cs +++ b/src/Components/WebAssembly/DevServer/src/Server/Startup.cs @@ -6,6 +6,7 @@ using Microsoft.AspNetCore.Http; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Net.Http.Headers; namespace Microsoft.AspNetCore.Components.WebAssembly.DevServer.Server; @@ -69,6 +70,14 @@ public static void Configure(IApplicationBuilder app, IConfiguration configurati { OnPrepareResponse = fileContext => { + // Avoid caching index.html during development. + // When hot reload is enabled, a middleware injects a hot reload script into the response HTML. + // We don't want the browser to bypass this injection by using a cached response that doesn't + // contain the injected script. In the future, if script injection is removed in favor of a + // different mechanism, we can delete this comment and the line below it. + // See also: https://github.com/dotnet/aspnetcore/issues/45213 + fileContext.Context.Response.Headers[HeaderNames.CacheControl] = "no-store"; + if (applyCopHeaders) { // Browser multi-threaded runtime requires cross-origin policy headers to enable SharedArrayBuffer. 
diff --git a/src/Components/test/E2ETest/ServerRenderingTests/EnhancedNavigationTest.cs b/src/Components/test/E2ETest/ServerRenderingTests/EnhancedNavigationTest.cs index 94b6b80f53a1..0ba10d655cdf 100644 --- a/src/Components/test/E2ETest/ServerRenderingTests/EnhancedNavigationTest.cs +++ b/src/Components/test/E2ETest/ServerRenderingTests/EnhancedNavigationTest.cs @@ -4,6 +4,7 @@ using Microsoft.AspNetCore.Components.E2ETest.Infrastructure.ServerFixtures; using Microsoft.AspNetCore.Components.E2ETest.Infrastructure; using Microsoft.AspNetCore.E2ETesting; +using Microsoft.AspNetCore.InternalTesting; using TestServer; using Xunit.Abstractions; using Components.TestServer.RazorComponents; @@ -519,6 +520,7 @@ public void NavigationManagerUriGetsUpdatedOnEnhancedNavigation_OnlyServerOrWebA [Theory] [InlineData("server")] [InlineData("wasm")] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/61143")] public void NavigationManagerUriGetsUpdatedOnEnhancedNavigation_BothServerAndWebAssembly(string runtimeThatInvokedNavigation) { Navigate($"{ServerPathBase}/nav"); diff --git a/src/Components/test/E2ETest/ServerRenderingTests/FormHandlingTests/FormWithParentBindingContextTest.cs b/src/Components/test/E2ETest/ServerRenderingTests/FormHandlingTests/FormWithParentBindingContextTest.cs index 4b35b0c5eed0..423b18bfc1dc 100644 --- a/src/Components/test/E2ETest/ServerRenderingTests/FormHandlingTests/FormWithParentBindingContextTest.cs +++ b/src/Components/test/E2ETest/ServerRenderingTests/FormHandlingTests/FormWithParentBindingContextTest.cs @@ -1287,7 +1287,7 @@ public void CanBindToFormWithFiles() } [Theory] - [InlineData(true)] + // [InlineData(true)] QuarantinedTest: https://github.com/dotnet/aspnetcore/issues/61882 [InlineData(false)] public void CanUseFormWithMethodGet(bool suppressEnhancedNavigation) { @@ -1344,19 +1344,19 @@ public void RadioButtonGetsResetAfterSubmittingEnhancedForm() { GoTo("forms/form-with-checkbox-and-radio-button"); - Assert.False(Browser.Exists(By.Id("checkbox")).Selected); - Assert.False(Browser.Exists(By.Id("radio-button")).Selected); + WaitAssert.False(Browser, () => Browser.Exists(By.Id("checkbox")).Selected); + WaitAssert.False(Browser, () => Browser.Exists(By.Id("radio-button")).Selected); Browser.Exists(By.Id("checkbox")).Click(); Browser.Exists(By.Id("radio-button")).Click(); - Assert.True(Browser.Exists(By.Id("checkbox")).Selected); - Assert.True(Browser.Exists(By.Id("radio-button")).Selected); + WaitAssert.True(Browser, () => Browser.Exists(By.Id("checkbox")).Selected); + WaitAssert.True(Browser, () => Browser.Exists(By.Id("radio-button")).Selected); Browser.Exists(By.Id("submit-button")).Click(); - Assert.False(Browser.Exists(By.Id("checkbox")).Selected); - Assert.False(Browser.Exists(By.Id("radio-button")).Selected); + WaitAssert.False(Browser, () => Browser.Exists(By.Id("checkbox")).Selected); + WaitAssert.False(Browser, () => Browser.Exists(By.Id("radio-button")).Selected); } [Fact] diff --git a/src/Framework/App.Runtime/src/Microsoft.AspNetCore.App.Runtime.csproj b/src/Framework/App.Runtime/src/Microsoft.AspNetCore.App.Runtime.csproj index 48725d72139d..5f488e03a398 100644 --- a/src/Framework/App.Runtime/src/Microsoft.AspNetCore.App.Runtime.csproj +++ b/src/Framework/App.Runtime/src/Microsoft.AspNetCore.App.Runtime.csproj @@ -100,12 +100,12 @@ This package is an internal implementation of the .NET Core SDK and is not meant PkgMicrosoft_NETCore_App_Runtime_$(RuntimeIdentifier.Replace('.', '_')) $(TargetOsName) - linux + 
linux $(TargetRuntimeIdentifier.Substring(0,$(TargetRuntimeIdentifier.IndexOf('-')))) x64 $(BuildArchitecture) @@ -560,9 +560,9 @@ This package is an internal implementation of the .NET Core SDK and is not meant - - - + + $(DotnetRuntimeSourceFeedKey) diff --git a/src/Http/Headers/test/CookieHeaderValueTest.cs b/src/Http/Headers/test/CookieHeaderValueTest.cs index 6623a8ed13dd..6ad2e962d005 100644 --- a/src/Http/Headers/test/CookieHeaderValueTest.cs +++ b/src/Http/Headers/test/CookieHeaderValueTest.cs @@ -75,7 +75,7 @@ public static TheoryData InvalidCookieValues } } - public static TheoryData, string?[]> ListOfCookieHeaderDataSet + public static TheoryData, string?[]> ListOfStrictCookieHeaderDataSet { get { @@ -94,19 +94,30 @@ public static TheoryData InvalidCookieValues dataset.Add(new[] { header1 }.ToList(), new[] { string1 }); dataset.Add(new[] { header1, header1 }.ToList(), new[] { string1, string1 }); - dataset.Add(new[] { header1, header1 }.ToList(), new[] { string1, null, "", " ", ";", " , ", string1 }); dataset.Add(new[] { header2 }.ToList(), new[] { string2 }); dataset.Add(new[] { header1, header2 }.ToList(), new[] { string1, string2 }); - dataset.Add(new[] { header1, header2 }.ToList(), new[] { string1 + ", " + string2 }); dataset.Add(new[] { header2, header1 }.ToList(), new[] { string2 + "; " + string1 }); dataset.Add(new[] { header1, header2, header3, header4 }.ToList(), new[] { string1, string2, string3, string4 }); - dataset.Add(new[] { header1, header2, header3, header4 }.ToList(), new[] { string.Join(",", string1, string2, string3, string4) }); dataset.Add(new[] { header1, header2, header3, header4 }.ToList(), new[] { string.Join(";", string1, string2, string3, string4) }); return dataset; } } + public static TheoryData, string?[]> ListOfCookieHeaderDataSet + { + get + { + var header1 = new CookieHeaderValue("name1", "n1=v1&n2=v2&n3=v3"); + var string1 = "name1=n1=v1&n2=v2&n3=v3"; + + var dataset = new TheoryData, string?[]>(); + dataset.Concat(ListOfStrictCookieHeaderDataSet); + dataset.Add(new[] { header1, header1 }.ToList(), new[] { string1, null, "", " ", ";", " , ", string1 }); + return dataset; + } + } + public static TheoryData?, string?[]> ListWithInvalidCookieHeaderDataSet { get @@ -127,18 +138,19 @@ public static TheoryData InvalidCookieValues dataset.Add(new[] { header1 }.ToList(), new[] { validString1, invalidString1 }); dataset.Add(new[] { header1 }.ToList(), new[] { validString1, null, "", " ", ";", " , ", invalidString1 }); dataset.Add(new[] { header1 }.ToList(), new[] { invalidString1, null, "", " ", ";", " , ", validString1 }); - dataset.Add(new[] { header1 }.ToList(), new[] { validString1 + ", " + invalidString1 }); - dataset.Add(new[] { header2 }.ToList(), new[] { invalidString1 + ", " + validString2 }); + dataset.Add(null, new[] { validString1 + ", " }); + dataset.Add(null, new[] { invalidString1 + ", " + validString2 }); dataset.Add(new[] { header1 }.ToList(), new[] { invalidString1 + "; " + validString1 }); dataset.Add(new[] { header2 }.ToList(), new[] { validString2 + "; " + invalidString1 }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { invalidString1, validString1, validString2, validString3 }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { validString1, invalidString1, validString2, validString3 }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { validString1, validString2, invalidString1, validString3 }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { validString1, 
validString2, validString3, invalidString1 }); - dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(",", invalidString1, validString1, validString2, validString3) }); - dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(",", validString1, invalidString1, validString2, validString3) }); - dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(",", validString1, validString2, invalidString1, validString3) }); - dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(",", validString1, validString2, validString3, invalidString1) }); + dataset.Add(null, new[] { string.Join(",", invalidString1, validString1, validString2, validString3) }); + dataset.Add(null, new[] { string.Join(",", validString1, invalidString1, validString2, validString3) }); + dataset.Add(null, new[] { string.Join(",", validString1, validString2, invalidString1, validString3) }); + dataset.Add(null, new[] { string.Join(",", validString1, validString2, validString3, invalidString1) }); + dataset.Add(null, new[] { string.Join(",", validString1, validString2, validString3) }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(";", invalidString1, validString1, validString2, validString3) }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(";", validString1, invalidString1, validString2, validString3) }); dataset.Add(new[] { header1, header2, header3 }.ToList(), new[] { string.Join(";", validString1, validString2, invalidString1, validString3) }); @@ -248,7 +260,7 @@ public void CookieHeaderValue_ParseList_AcceptsValidValues(IList cookies, string[] input) { var results = CookieHeaderValue.ParseStrictList(input); @@ -267,7 +279,7 @@ public void CookieHeaderValue_TryParseList_AcceptsValidValues(IList cookies, string[] input) { var result = CookieHeaderValue.TryParseStrictList(input, out var results); diff --git a/src/Http/Http.Extensions/test/HeaderDictionaryTypeExtensionsTest.cs b/src/Http/Http.Extensions/test/HeaderDictionaryTypeExtensionsTest.cs index 33ecc3ff1ea8..b4071866534b 100644 --- a/src/Http/Http.Extensions/test/HeaderDictionaryTypeExtensionsTest.cs +++ b/src/Http/Http.Extensions/test/HeaderDictionaryTypeExtensionsTest.cs @@ -214,7 +214,7 @@ public void GetListT_StringWithQualityHeaderValidValue_Success() public void GetListT_CookieHeaderValue_Success() { var context = new DefaultHttpContext(); - context.Request.Headers.Cookie = "cookie1=a,cookie2=b"; + context.Request.Headers.Cookie = "cookie1=a;cookie2=b"; var result = context.Request.GetTypedHeaders().GetList(HeaderNames.Cookie); diff --git a/src/Http/Http/test/RequestCookiesCollectionTests.cs b/src/Http/Http/test/RequestCookiesCollectionTests.cs index e08a53f29711..d584f2db0599 100644 --- a/src/Http/Http/test/RequestCookiesCollectionTests.cs +++ b/src/Http/Http/test/RequestCookiesCollectionTests.cs @@ -33,11 +33,18 @@ public void ParseManyCookies() [Theory] [InlineData(",", null)] [InlineData(";", null)] - [InlineData("er=dd,cc,bb", new[] { "dd" })] - [InlineData("er=dd,err=cc,errr=bb", new[] { "dd", "cc", "bb" })] - [InlineData("errorcookie=dd,:(\"sa;", new[] { "dd" })] + [InlineData("er=dd,cc,bb", null)] + [InlineData("er=dd,err=cc,errr=bb", null)] + [InlineData("errorcookie=dd,:(\"sa;", null)] [InlineData("s;", null)] [InlineData("er=;,err=,errr=\\,errrr=\"", null)] + [InlineData("a@a=a;", null)] + [InlineData("a@ a=a;", null)] + [InlineData("a a=a;", null)] + [InlineData(",a=a;", null)] + [InlineData(",a=a", 
null)] + [InlineData("a=a;,b=b", new []{ "a" })] // valid cookie followed by invalid cookie + [InlineData(",a=a;b=b", new[] { "b" })] // invalid cookie followed by valid cookie public void ParseInvalidCookies(string cookieToParse, string[] expectedCookieValues) { var cookies = RequestCookieCollection.Parse(new StringValues(new[] { cookieToParse })); diff --git a/src/Http/Shared/CookieHeaderParserShared.cs b/src/Http/Shared/CookieHeaderParserShared.cs index e4b1d83e519a..0eb1c64d533a 100644 --- a/src/Http/Shared/CookieHeaderParserShared.cs +++ b/src/Http/Shared/CookieHeaderParserShared.cs @@ -89,6 +89,17 @@ public static bool TryParseValue(StringSegment value, ref int index, bool suppor if (!TryGetCookieLength(value, ref current, out parsedName, out parsedValue)) { + var separatorIndex = value.IndexOf(';', current); + if (separatorIndex > 0) + { + // Skip the invalid values and keep trying. + index = separatorIndex; + } + else + { + // No more separators, so we're done. + index = value.Length; + } return false; } @@ -97,6 +108,17 @@ public static bool TryParseValue(StringSegment value, ref int index, bool suppor // If we support multiple values and we've not reached the end of the string, then we must have a separator. if ((separatorFound && !supportsMultipleValues) || (!separatorFound && (current < value.Length))) { + var separatorIndex = value.IndexOf(';', current); + if (separatorIndex > 0) + { + // Skip the invalid values and keep trying. + index = separatorIndex; + } + else + { + // No more separators, so we're done. + index = value.Length; + } return false; } @@ -112,7 +134,7 @@ private static int GetNextNonEmptyOrWhitespaceIndex(StringSegment input, int sta separatorFound = false; var current = startIndex + HttpRuleParser.GetWhitespaceLength(input, startIndex); - if ((current == input.Length) || (input[current] != ',' && input[current] != ';')) + if (current == input.Length || input[current] != ';') { return current; } @@ -125,8 +147,8 @@ private static int GetNextNonEmptyOrWhitespaceIndex(StringSegment input, int sta if (skipEmptyValues) { - // Most headers only split on ',', but cookies primarily split on ';' - while ((current < input.Length) && ((input[current] == ',') || (input[current] == ';'))) + // Cookies are split on ';' + while (current < input.Length && input[current] == ';') { current++; // skip delimiter. current = current + HttpRuleParser.GetWhitespaceLength(input, current); @@ -136,6 +158,18 @@ private static int GetNextNonEmptyOrWhitespaceIndex(StringSegment input, int sta return current; } + /* + * https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * cookie-pair = cookie-name "=" cookie-value + * cookie-name = token + * token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + CTL = + */ // name=value; name="value" internal static bool TryGetCookieLength(StringSegment input, ref int offset, [NotNullWhen(true)] out StringSegment? parsedName, [NotNullWhen(true)] out StringSegment? 
parsedValue) { diff --git a/src/Http/WebUtilities/src/MultipartReaderStream.cs b/src/Http/WebUtilities/src/MultipartReaderStream.cs index 208d1b38f0e0..9dd5ce6a76f8 100644 --- a/src/Http/WebUtilities/src/MultipartReaderStream.cs +++ b/src/Http/WebUtilities/src/MultipartReaderStream.cs @@ -174,7 +174,7 @@ public override int Read(byte[] buffer, int offset, int count) if (index != 0) { // Sync, it's already buffered - var slice = buffer.AsSpan(0, Math.Min(buffer.Length, index)); + var slice = buffer.AsSpan(offset, Math.Min(count, index)); var readAmount = _innerStream.Read(slice); return UpdatePosition(readAmount); diff --git a/src/Http/WebUtilities/test/MultipartReaderTests.cs b/src/Http/WebUtilities/test/MultipartReaderTests.cs index 8231ec472bc0..bc442b567dc0 100644 --- a/src/Http/WebUtilities/test/MultipartReaderTests.cs +++ b/src/Http/WebUtilities/test/MultipartReaderTests.cs @@ -389,4 +389,28 @@ public async Task MultipartReader_StripQuotesFromBoundary() var section = await reader.ReadNextSectionAsync(); Assert.NotNull(section); } + + [Fact] + public async Task SyncReadWithOffsetWorks() + { + var stream = MakeStream(OnePartBody); + var reader = new MultipartReader(Boundary, stream); + var buffer = new byte[5]; + + var section = await reader.ReadNextSectionAsync(); + Assert.NotNull(section); + Assert.Single(section.Headers); + Assert.Equal("form-data; name=\"text\"", section.Headers["Content-Disposition"][0]); + + var read = section.Body.Read(buffer, 2, buffer.Length - 2); + Assert.Equal("\0\0tex", GetString(buffer, read + 2)); + + read = section.Body.Read(buffer, 1, buffer.Length - 1); + Assert.Equal("\0t de", GetString(buffer, read + 1)); + + read = section.Body.Read(buffer, 0, buffer.Length); + Assert.Equal("fault", GetString(buffer, read)); + + Assert.Null(await reader.ReadNextSectionAsync()); + } } diff --git a/src/Identity/Core/src/SignInManager.cs b/src/Identity/Core/src/SignInManager.cs index b5659b329854..66f06c4d3465 100644 --- a/src/Identity/Core/src/SignInManager.cs +++ b/src/Identity/Core/src/SignInManager.cs @@ -162,8 +162,21 @@ public virtual async Task CanSignInAsync(TUser user) public virtual async Task RefreshSignInAsync(TUser user) { var auth = await Context.AuthenticateAsync(AuthenticationScheme); - IList claims = Array.Empty(); + if (!auth.Succeeded || auth.Principal?.Identity?.IsAuthenticated != true) + { + Logger.LogError("RefreshSignInAsync prevented because the user is not currently authenticated. Use SignInAsync instead for initial sign in."); + return; + } + var authenticatedUserId = UserManager.GetUserId(auth.Principal); + var newUserId = await UserManager.GetUserIdAsync(user); + if (authenticatedUserId == null || authenticatedUserId != newUserId) + { + Logger.LogError("RefreshSignInAsync prevented because currently authenticated user has a different UserId. 
Use SignInAsync instead to change users."); + return; + } + + IList claims = Array.Empty(); var authenticationMethod = auth?.Principal?.FindFirst(ClaimTypes.AuthenticationMethod); var amr = auth?.Principal?.FindFirst("amr"); diff --git a/src/Identity/EntityFrameworkCore/test/EF.Test/UserStoreTest.cs b/src/Identity/EntityFrameworkCore/test/EF.Test/UserStoreTest.cs index f55c2b48340d..fe7eb5ee9003 100644 --- a/src/Identity/EntityFrameworkCore/test/EF.Test/UserStoreTest.cs +++ b/src/Identity/EntityFrameworkCore/test/EF.Test/UserStoreTest.cs @@ -144,6 +144,9 @@ await Assert.ThrowsAsync("user", await Assert.ThrowsAsync("user", async () => await store.GetTwoFactorEnabledAsync(null)); await Assert.ThrowsAsync("user", async () => await store.SetTwoFactorEnabledAsync(null, true)); + await Assert.ThrowsAsync("user", async () => await store.RedeemCodeAsync(user: null, code: "fake", default)); + await Assert.ThrowsAsync("code", async () => await store.RedeemCodeAsync(new IdentityUser("fake"), code: null, default)); + await Assert.ThrowsAsync("code", async () => await store.RedeemCodeAsync(new IdentityUser("fake"), code: "", default)); await Assert.ThrowsAsync("user", async () => await store.GetAccessFailedCountAsync(null)); await Assert.ThrowsAsync("user", async () => await store.GetLockoutEnabledAsync(null)); await Assert.ThrowsAsync("user", async () => await store.SetLockoutEnabledAsync(null, false)); diff --git a/src/Identity/Extensions.Stores/src/UserStoreBase.cs b/src/Identity/Extensions.Stores/src/UserStoreBase.cs index c45dd197e4a2..804ebcbad7dc 100644 --- a/src/Identity/Extensions.Stores/src/UserStoreBase.cs +++ b/src/Identity/Extensions.Stores/src/UserStoreBase.cs @@ -969,7 +969,7 @@ public virtual async Task RedeemCodeAsync(TUser user, string code, Cancell ThrowIfDisposed(); ArgumentNullThrowHelper.ThrowIfNull(user); - ArgumentNullThrowHelper.ThrowIfNull(code); + ArgumentNullThrowHelper.ThrowIfNullOrEmpty(code); var mergedCodes = await GetTokenAsync(user, InternalLoginProvider, RecoveryCodeTokenName, cancellationToken).ConfigureAwait(false) ?? ""; var splitCodes = mergedCodes.Split(';'); diff --git a/src/Identity/UI/src/Microsoft.AspNetCore.Identity.UI.csproj b/src/Identity/UI/src/Microsoft.AspNetCore.Identity.UI.csproj index a43b8e998eec..d36fe09d60c4 100644 --- a/src/Identity/UI/src/Microsoft.AspNetCore.Identity.UI.csproj +++ b/src/Identity/UI/src/Microsoft.AspNetCore.Identity.UI.csproj @@ -90,11 +90,27 @@ %(RecursiveDir)%(FileName)%(Extension) - - + + - + + @@ -106,7 +122,7 @@ > - + - - + + + Discovered + + + Discovered + + + + + - + - + diff --git a/src/Identity/test/Identity.Test/SignInManagerTest.cs b/src/Identity/test/Identity.Test/SignInManagerTest.cs index d1072676138a..73fe6d6be218 100644 --- a/src/Identity/test/Identity.Test/SignInManagerTest.cs +++ b/src/Identity/test/Identity.Test/SignInManagerTest.cs @@ -592,38 +592,38 @@ public async Task CanExternalSignIn(bool isPersistent, bool supportsLockout) [InlineData(true, false)] [InlineData(false, true)] [InlineData(false, false)] - public async Task CanResignIn( - // Suppress warning that says theory methods should use all of their parameters. - // See comments below about why this isn't used. 
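Side note (illustrative sketch, not part of the patch): the RefreshSignInAsync change above turns the call into a guarded no-op unless the current request is already authenticated as the same user. The helper below only shows the intended calling pattern; the method name RefreshIfSameUserAsync and the IdentityUser type are assumptions for this example.

using System.Security.Claims;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Identity;

static async Task RefreshIfSameUserAsync(
    SignInManager<IdentityUser> signInManager,
    UserManager<IdentityUser> userManager,
    ClaimsPrincipal currentPrincipal)
{
    var user = await userManager.GetUserAsync(currentPrincipal);
    if (user is not null)
    {
        // Same authenticated user: safe to re-issue the authentication cookie with fresh claims.
        await signInManager.RefreshSignInAsync(user);
        return;
    }

    // Not authenticated, or authenticated as a different user: the new guard logs an error and
    // returns without signing anyone in, so an explicit SignInAsync/PasswordSignInAsync flow is needed.
}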
-#pragma warning disable xUnit1026 - bool isPersistent, -#pragma warning restore xUnit1026 - bool externalLogin) + public async Task CanResignIn(bool isPersistent, bool externalLogin) { // Setup var user = new PocoUser { UserName = "Foo" }; var context = new DefaultHttpContext(); var auth = MockAuth(context); var loginProvider = "loginprovider"; - var id = new ClaimsIdentity(); + var id = new ClaimsIdentity("authscheme"); if (externalLogin) { id.AddClaim(new Claim(ClaimTypes.AuthenticationMethod, loginProvider)); } - // REVIEW: auth changes we lost the ability to mock is persistent - //var properties = new AuthenticationProperties { IsPersistent = isPersistent }; - var authResult = AuthenticateResult.NoResult(); + + var claimsPrincipal = new ClaimsPrincipal(id); + var properties = new AuthenticationProperties { IsPersistent = isPersistent }; + var authResult = AuthenticateResult.Success(new AuthenticationTicket(claimsPrincipal, properties, "authscheme")); auth.Setup(a => a.AuthenticateAsync(context, IdentityConstants.ApplicationScheme)) .Returns(Task.FromResult(authResult)).Verifiable(); var manager = SetupUserManager(user); + manager.Setup(m => m.GetUserId(claimsPrincipal)).Returns(user.Id.ToString()); var signInManager = new Mock>(manager.Object, new HttpContextAccessor { HttpContext = context }, new Mock>().Object, null, null, new Mock().Object, null) { CallBase = true }; - //signInManager.Setup(s => s.SignInAsync(user, It.Is(p => p.IsPersistent == isPersistent), - //externalLogin? loginProvider : null)).Returns(Task.FromResult(0)).Verifiable(); - signInManager.Setup(s => s.SignInWithClaimsAsync(user, It.IsAny(), It.IsAny>())).Returns(Task.FromResult(0)).Verifiable(); + + signInManager.Setup(s => s.SignInWithClaimsAsync(user, + It.Is(properties => properties.IsPersistent == isPersistent), + It.Is>(claims => !externalLogin || + claims.Any(claim => claim.Type == ClaimTypes.AuthenticationMethod && claim.Value == loginProvider)))) + .Returns(Task.FromResult(0)).Verifiable(); + signInManager.Object.Context = context; // Act @@ -634,6 +634,58 @@ public async Task CanResignIn( signInManager.Verify(); } + [Fact] + public async Task ResignInNoOpsAndLogsErrorIfNotAuthenticated() + { + var user = new PocoUser { UserName = "Foo" }; + var context = new DefaultHttpContext(); + var auth = MockAuth(context); + var manager = SetupUserManager(user); + var logger = new TestLogger>(); + var signInManager = new Mock>(manager.Object, + new HttpContextAccessor { HttpContext = context }, + new Mock>().Object, + null, logger, new Mock().Object, null) + { CallBase = true }; + auth.Setup(a => a.AuthenticateAsync(context, IdentityConstants.ApplicationScheme)) + .Returns(Task.FromResult(AuthenticateResult.NoResult())).Verifiable(); + + await signInManager.Object.RefreshSignInAsync(user); + + Assert.Contains("RefreshSignInAsync prevented because the user is not currently authenticated. 
Use SignInAsync instead for initial sign in.", logger.LogMessages); + auth.Verify(); + signInManager.Verify(s => s.SignInWithClaimsAsync(It.IsAny(), It.IsAny(), It.IsAny>()), + Times.Never()); + } + + [Fact] + public async Task ResignInNoOpsAndLogsErrorIfAuthenticatedWithDifferentUser() + { + var user = new PocoUser { UserName = "Foo" }; + var context = new DefaultHttpContext(); + var auth = MockAuth(context); + var manager = SetupUserManager(user); + var logger = new TestLogger>(); + var signInManager = new Mock>(manager.Object, + new HttpContextAccessor { HttpContext = context }, + new Mock>().Object, + null, logger, new Mock().Object, null) + { CallBase = true }; + var id = new ClaimsIdentity("authscheme"); + var claimsPrincipal = new ClaimsPrincipal(id); + var authResult = AuthenticateResult.Success(new AuthenticationTicket(claimsPrincipal, new AuthenticationProperties(), "authscheme")); + auth.Setup(a => a.AuthenticateAsync(context, IdentityConstants.ApplicationScheme)) + .Returns(Task.FromResult(authResult)).Verifiable(); + manager.Setup(m => m.GetUserId(claimsPrincipal)).Returns("different"); + + await signInManager.Object.RefreshSignInAsync(user); + + Assert.Contains("RefreshSignInAsync prevented because currently authenticated user has a different UserId. Use SignInAsync instead to change users.", logger.LogMessages); + auth.Verify(); + signInManager.Verify(s => s.SignInWithClaimsAsync(It.IsAny(), It.IsAny(), It.IsAny>()), + Times.Never()); + } + [Theory] [InlineData(true, true, true, true)] [InlineData(true, true, false, true)] diff --git a/src/Installers/Debian/Directory.Build.targets b/src/Installers/Debian/Directory.Build.targets index 2d854470f156..771ecec7da73 100644 --- a/src/Installers/Debian/Directory.Build.targets +++ b/src/Installers/Debian/Directory.Build.targets @@ -60,5 +60,6 @@ + diff --git a/src/Installers/Debian/Runtime/Debian.Runtime.debproj b/src/Installers/Debian/Runtime/Debian.Runtime.debproj index 343216335618..85391faafac4 100644 --- a/src/Installers/Debian/Runtime/Debian.Runtime.debproj +++ b/src/Installers/Debian/Runtime/Debian.Runtime.debproj @@ -34,6 +34,7 @@ $(RuntimeInstallerBaseName)-$(SharedFxVersion)-x64.deb + $(RuntimeInstallerBaseName)-$(SharedFxVersion)-newkey-x64.deb $(TargetDir)$(TargetFileName) $(VersionPrefix) diff --git a/src/Installers/Debian/TargetingPack/Debian.TargetingPack.debproj b/src/Installers/Debian/TargetingPack/Debian.TargetingPack.debproj index 5295906837c5..4ffc3aa85fe8 100644 --- a/src/Installers/Debian/TargetingPack/Debian.TargetingPack.debproj +++ b/src/Installers/Debian/TargetingPack/Debian.TargetingPack.debproj @@ -38,6 +38,7 @@ $(TargetingPackInstallerBaseName)-$(TargetingPackVersion)-$(TargetArchitecture).deb + $(TargetingPackInstallerBaseName)-$(TargetingPackVersion)-newkey-$(TargetArchitecture).deb $(TargetDir)$(TargetFileName) $(TargetingPackVersionPrefix) diff --git a/src/Installers/Rpm/Directory.Build.props b/src/Installers/Rpm/Directory.Build.props index 252ab60311c5..d74912838189 100644 --- a/src/Installers/Rpm/Directory.Build.props +++ b/src/Installers/Rpm/Directory.Build.props @@ -11,6 +11,7 @@ -cm.1 -cm.2 + -newkey true diff --git a/src/Installers/Rpm/Directory.Build.targets b/src/Installers/Rpm/Directory.Build.targets index 30d705abb1dc..e9800cf062eb 100644 --- a/src/Installers/Rpm/Directory.Build.targets +++ b/src/Installers/Rpm/Directory.Build.targets @@ -40,6 +40,7 @@ $(InstallersOutputPath)$(CblMarinerBaseName)$(CblMariner1VersionSuffix)$(CblMarinerExtension) 
$(InstallersOutputPath)$(CblMarinerBaseName)$(CblMariner2VersionSuffix)$(CblMarinerExtension) + $(InstallersOutputPath)$(NewKeyBaseName)$(NewKeyVersionSuffix)$(NewKeyExtension) @@ -103,5 +104,13 @@ UseHardlinksIfPossible="False" /> + + + + diff --git a/src/Installers/Rpm/Runtime/Rpm.Runtime.rpmproj b/src/Installers/Rpm/Runtime/Rpm.Runtime.rpmproj index 5e0427aea7f8..26e74893aba8 100644 --- a/src/Installers/Rpm/Runtime/Rpm.Runtime.rpmproj +++ b/src/Installers/Rpm/Runtime/Rpm.Runtime.rpmproj @@ -16,5 +16,7 @@ $(InstallersOutputPath)$(TargetFileName) $(RuntimeInstallerBaseName)-$(SharedFxVersion) -$(RpmArch).rpm + $(RuntimeInstallerBaseName)-$(SharedFxVersion) + -$(RpmArch).rpm diff --git a/src/Installers/Rpm/TargetingPack/Rpm.TargetingPack.rpmproj b/src/Installers/Rpm/TargetingPack/Rpm.TargetingPack.rpmproj index a29db936bd79..7098eaea452c 100644 --- a/src/Installers/Rpm/TargetingPack/Rpm.TargetingPack.rpmproj +++ b/src/Installers/Rpm/TargetingPack/Rpm.TargetingPack.rpmproj @@ -33,6 +33,8 @@ $(InstallersOutputPath)$(TargetFileName) $(TargetingPackInstallerBaseName)-$(TargetingPackVersion) -$(RpmArch).rpm + $(TargetingPackInstallerBaseName)-$(TargetingPackVersion) + -$(RpmArch).rpm $(TargetingPackVersionPrefix) diff --git a/src/Installers/Windows/WindowsHostingBundle/Product.targets b/src/Installers/Windows/WindowsHostingBundle/Product.targets index 3b1cf82c1076..c1dc097445d4 100644 --- a/src/Installers/Windows/WindowsHostingBundle/Product.targets +++ b/src/Installers/Windows/WindowsHostingBundle/Product.targets @@ -83,9 +83,9 @@ --> - - - + + $(DotnetRuntimeSourceFeedKey) diff --git a/src/Middleware/HttpOverrides/src/ForwardedHeadersMiddleware.cs b/src/Middleware/HttpOverrides/src/ForwardedHeadersMiddleware.cs index 6b2a118cb132..72cae1507a0e 100644 --- a/src/Middleware/HttpOverrides/src/ForwardedHeadersMiddleware.cs +++ b/src/Middleware/HttpOverrides/src/ForwardedHeadersMiddleware.cs @@ -21,6 +21,7 @@ public class ForwardedHeadersMiddleware private readonly ForwardedHeadersOptions _options; private readonly RequestDelegate _next; private readonly ILogger _logger; + private readonly bool _ignoreUnknownProxiesWithoutFor; private bool _allowAllHosts; private IList? _allowedHosts; @@ -63,6 +64,18 @@ public ForwardedHeadersMiddleware(RequestDelegate next, ILoggerFactory loggerFac _logger = loggerFactory.CreateLogger(); _next = next; + if (AppContext.TryGetSwitch("Microsoft.AspNetCore.HttpOverrides.IgnoreUnknownProxiesWithoutFor", out var enabled) + && enabled) + { + _ignoreUnknownProxiesWithoutFor = true; + } + + if (Environment.GetEnvironmentVariable("MICROSOFT_ASPNETCORE_HTTPOVERRIDES_IGNORE_UNKNOWN_PROXIES_WITHOUT_FOR") is string env + && (env.Equals("true", StringComparison.OrdinalIgnoreCase) || env.Equals("1"))) + { + _ignoreUnknownProxiesWithoutFor = true; + } + PreProcessHosts(); } @@ -220,19 +233,24 @@ public void ApplyForwarders(HttpContext context) for (; entriesConsumed < sets.Length; entriesConsumed++) { var set = sets[entriesConsumed]; - if (checkFor) + // Opt-out of breaking change behavior where we now always check KnownProxies and KnownNetworks + // It used to be guarded by the ForwardedHeaders.XForwardedFor flag, but now we always check it. + if (!_ignoreUnknownProxiesWithoutFor || checkFor) { // For the first instance, allow remoteIp to be null for servers that don't support it natively. 
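Side note (illustrative sketch, not part of the patch): the ForwardedHeadersMiddleware constructor change above reads an AppContext switch and an environment variable so applications can opt back into the old behavior, where unknown proxies were only rejected while X-Forwarded-For processing was enabled. A minimal opt-in could look like the following; only the switch and variable names are taken from the middleware code above, the rest is an assumed host setup.

using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.HttpOverrides;

// Either flip the switch before the middleware is constructed...
AppContext.SetSwitch("Microsoft.AspNetCore.HttpOverrides.IgnoreUnknownProxiesWithoutFor", true);
// ...or set MICROSOFT_ASPNETCORE_HTTPOVERRIDES_IGNORE_UNKNOWN_PROXIES_WITHOUT_FOR=true for the process.

var builder = WebApplication.CreateBuilder(args);
var app = builder.Build();

app.UseForwardedHeaders(new ForwardedHeadersOptions
{
    // Without the opt-out, these headers are now ignored unless the remote address is a known proxy/network.
    ForwardedHeaders = ForwardedHeaders.XForwardedHost | ForwardedHeaders.XForwardedProto
});

app.Run();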
if (currentValues.RemoteIpAndPort != null && checkKnownIps && !CheckKnownAddress(currentValues.RemoteIpAndPort.Address)) { // Stop at the first unknown remote IP, but still apply changes processed so far. - if (_logger.IsEnabled(LogLevel.Debug)) + if (_logger.IsEnabled(LogLevel.Warning)) { - _logger.LogDebug(1, "Unknown proxy: {RemoteIpAndPort}", currentValues.RemoteIpAndPort); + _logger.LogWarning(1, "Unknown proxy: {RemoteIpAndPort}", currentValues.RemoteIpAndPort); } break; } + } + if (checkFor) + { if (IPEndPoint.TryParse(set.IpAndPortText, out var parsedEndPoint)) { applyChanges = true; diff --git a/src/Middleware/HttpOverrides/test/ForwardedHeadersMiddlewareTest.cs b/src/Middleware/HttpOverrides/test/ForwardedHeadersMiddlewareTest.cs index aa33a191e7b7..627ad96a3cd6 100644 --- a/src/Middleware/HttpOverrides/test/ForwardedHeadersMiddlewareTest.cs +++ b/src/Middleware/HttpOverrides/test/ForwardedHeadersMiddlewareTest.cs @@ -7,6 +7,7 @@ using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.TestHost; +using Microsoft.DotNet.RemoteExecutor; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; @@ -962,6 +963,201 @@ public async Task AllOptionsDisabledRequestDoesntChange() Assert.Equal(PathString.Empty, context.Request.PathBase); } + [Theory] + [InlineData(ForwardedHeaders.XForwardedFor, false)] + [InlineData(ForwardedHeaders.XForwardedFor, true)] + [InlineData(ForwardedHeaders.XForwardedHost, false)] + [InlineData(ForwardedHeaders.XForwardedHost, true)] + [InlineData(ForwardedHeaders.XForwardedProto, false)] + [InlineData(ForwardedHeaders.XForwardedProto, true)] + [InlineData(ForwardedHeaders.XForwardedPrefix, false)] + [InlineData(ForwardedHeaders.XForwardedPrefix, true)] + public async Task IgnoreXForwardedHeadersFromUnknownProxy(ForwardedHeaders forwardedHeaders, bool unknownProxy) + { + using var host = new HostBuilder() + .ConfigureWebHost(webHostBuilder => + { + webHostBuilder + .UseTestServer() + .Configure(app => + { + var options = new ForwardedHeadersOptions + { + ForwardedHeaders = forwardedHeaders + }; + if (!unknownProxy) + { + var proxy = IPAddress.Parse("10.0.0.1"); + options.KnownProxies.Add(proxy); + } + app.UseForwardedHeaders(options); + }); + }).Build(); + + await host.StartAsync(); + + var server = host.GetTestServer(); + + var context = await server.SendAsync(c => + { + c.Request.Headers["X-Forwarded-For"] = "11.111.111.11"; + c.Request.Headers["X-Forwarded-Host"] = "testhost"; + c.Request.Headers["X-Forwarded-Proto"] = "Protocol"; + c.Request.Headers["X-Forwarded-Prefix"] = "/pathbase"; + c.Connection.RemoteIpAddress = IPAddress.Parse("10.0.0.1"); + c.Connection.RemotePort = 99; + }); + + if (unknownProxy) + { + Assert.Equal("10.0.0.1", context.Connection.RemoteIpAddress.ToString()); + Assert.Equal("localhost", context.Request.Host.ToString()); + Assert.Equal("http", context.Request.Scheme); + Assert.Equal(PathString.Empty, context.Request.PathBase); + } + else + { + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedFor)) + { + Assert.Equal("11.111.111.11", context.Connection.RemoteIpAddress.ToString()); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedHost)) + { + Assert.Equal("testhost", context.Request.Host.ToString()); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedProto)) + { + Assert.Equal("Protocol", context.Request.Scheme); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedPrefix)) + { + Assert.Equal("/pathbase", context.Request.PathBase); + 
} + } + } + + [Theory] + [InlineData(ForwardedHeaders.XForwardedFor)] + [InlineData(ForwardedHeaders.XForwardedHost)] + [InlineData(ForwardedHeaders.XForwardedProto)] + [InlineData(ForwardedHeaders.XForwardedPrefix)] + public void AppContextDoesNotValidateUnknownProxyWithoutForwardedFor(ForwardedHeaders forwardedHeaders) + { + RemoteExecutor.Invoke(static async (forwardedHeadersName) => + { + Assert.True(Enum.TryParse(forwardedHeadersName, out var forwardedHeaders)); + AppContext.SetSwitch("Microsoft.AspNetCore.HttpOverrides.IgnoreUnknownProxiesWithoutFor", true); + using var host = new HostBuilder() + .ConfigureWebHost(webHostBuilder => + { + webHostBuilder + .UseTestServer() + .Configure(app => + { + var options = new ForwardedHeadersOptions + { + ForwardedHeaders = forwardedHeaders + }; + app.UseForwardedHeaders(options); + }); + }).Build(); + + await host.StartAsync(); + + var server = host.GetTestServer(); + + var context = await server.SendAsync(c => + { + c.Request.Headers["X-Forwarded-For"] = "11.111.111.11"; + c.Request.Headers["X-Forwarded-Host"] = "testhost"; + c.Request.Headers["X-Forwarded-Proto"] = "Protocol"; + c.Request.Headers["X-Forwarded-Prefix"] = "/pathbase"; + c.Connection.RemoteIpAddress = IPAddress.Parse("10.0.0.1"); + c.Connection.RemotePort = 99; + }); + + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedFor)) + { + // X-Forwarded-For ignored since 10.0.0.1 isn't in KnownProxies + Assert.Equal("10.0.0.1", context.Connection.RemoteIpAddress.ToString()); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedHost)) + { + Assert.Equal("testhost", context.Request.Host.ToString()); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedProto)) + { + Assert.Equal("Protocol", context.Request.Scheme); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedPrefix)) + { + Assert.Equal("/pathbase", context.Request.PathBase); + } + return RemoteExecutor.SuccessExitCode; + }, forwardedHeaders.ToString()).Dispose(); + } + + [Theory] + [InlineData(ForwardedHeaders.XForwardedFor)] + [InlineData(ForwardedHeaders.XForwardedHost)] + [InlineData(ForwardedHeaders.XForwardedProto)] + [InlineData(ForwardedHeaders.XForwardedPrefix)] + public void EnvVariableDoesNotValidateUnknownProxyWithoutForwardedFor(ForwardedHeaders forwardedHeaders) + { + RemoteExecutor.Invoke(static async (forwardedHeadersName) => + { + Assert.True(Enum.TryParse(forwardedHeadersName, out var forwardedHeaders)); + Environment.SetEnvironmentVariable("MICROSOFT_ASPNETCORE_HTTPOVERRIDES_IGNORE_UNKNOWN_PROXIES_WITHOUT_FOR", "true"); + using var host = new HostBuilder() + .ConfigureWebHost(webHostBuilder => + { + webHostBuilder + .UseTestServer() + .Configure(app => + { + var options = new ForwardedHeadersOptions + { + ForwardedHeaders = forwardedHeaders + }; + app.UseForwardedHeaders(options); + }); + }).Build(); + + await host.StartAsync(); + + var server = host.GetTestServer(); + + var context = await server.SendAsync(c => + { + c.Request.Headers["X-Forwarded-For"] = "11.111.111.11"; + c.Request.Headers["X-Forwarded-Host"] = "testhost"; + c.Request.Headers["X-Forwarded-Proto"] = "Protocol"; + c.Request.Headers["X-Forwarded-Prefix"] = "/pathbase"; + c.Connection.RemoteIpAddress = IPAddress.Parse("10.0.0.1"); + c.Connection.RemotePort = 99; + }); + + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedFor)) + { + // X-Forwarded-For ignored since 10.0.0.1 isn't in KnownProxies + Assert.Equal("10.0.0.1", context.Connection.RemoteIpAddress.ToString()); + } + if 
(forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedHost)) + { + Assert.Equal("testhost", context.Request.Host.ToString()); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedProto)) + { + Assert.Equal("Protocol", context.Request.Scheme); + } + if (forwardedHeaders.HasFlag(ForwardedHeaders.XForwardedPrefix)) + { + Assert.Equal("/pathbase", context.Request.PathBase); + } + return RemoteExecutor.SuccessExitCode; + }, forwardedHeaders.ToString()).Dispose(); + } + [Fact] public async Task PartiallyEnabledForwardsPartiallyChangesRequest() { diff --git a/src/Middleware/OutputCaching/src/Memory/MemoryOutputCacheStore.cs b/src/Middleware/OutputCaching/src/Memory/MemoryOutputCacheStore.cs index a75546b6793f..38a38069b32c 100644 --- a/src/Middleware/OutputCaching/src/Memory/MemoryOutputCacheStore.cs +++ b/src/Middleware/OutputCaching/src/Memory/MemoryOutputCacheStore.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. using System.Diagnostics; +using System.Linq; using Microsoft.Extensions.Caching.Memory; namespace Microsoft.AspNetCore.OutputCaching.Memory; @@ -9,7 +10,7 @@ namespace Microsoft.AspNetCore.OutputCaching.Memory; internal sealed class MemoryOutputCacheStore : IOutputCacheStore { private readonly MemoryCache _cache; - private readonly Dictionary> _taggedEntries = new(); + private readonly Dictionary> _taggedEntries = []; private readonly object _tagsLock = new(); internal MemoryOutputCacheStore(MemoryCache cache) @@ -20,7 +21,7 @@ internal MemoryOutputCacheStore(MemoryCache cache) } // For testing - internal Dictionary> TaggedEntries => _taggedEntries; + internal Dictionary> TaggedEntries => _taggedEntries.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Select(t => t.Key).ToHashSet()); public ValueTask EvictByTagAsync(string tag, CancellationToken cancellationToken) { @@ -30,7 +31,7 @@ public ValueTask EvictByTagAsync(string tag, CancellationToken cancellationToken { if (_taggedEntries.TryGetValue(tag, out var keys)) { - if (keys != null && keys.Count > 0) + if (keys is { Count: > 0 }) { // If MemoryCache changed to run eviction callbacks inline in Remove, iterating over keys could throw // To prevent allocating a copy of the keys we check if the eviction callback ran, @@ -40,7 +41,7 @@ public ValueTask EvictByTagAsync(string tag, CancellationToken cancellationToken while (i > 0) { var oldCount = keys.Count; - foreach (var key in keys) + foreach (var (key, _) in keys) { _cache.Remove(key); i--; @@ -74,6 +75,8 @@ public ValueTask SetAsync(string key, byte[] value, string[]? tags, TimeSpan val ArgumentNullException.ThrowIfNull(key); ArgumentNullException.ThrowIfNull(value); + var entryId = Guid.NewGuid(); + if (tags != null) { // Lock with SetEntry() to prevent EvictByTagAsync() from trying to remove a tag whose entry hasn't been added yet. @@ -90,27 +93,27 @@ public ValueTask SetAsync(string key, byte[] value, string[]? tags, TimeSpan val if (!_taggedEntries.TryGetValue(tag, out var keys)) { - keys = new HashSet(); + keys = new HashSet(); _taggedEntries[tag] = keys; } Debug.Assert(keys != null); - keys.Add(key); + keys.Add(new TaggedEntry(key, entryId)); } - SetEntry(key, value, tags, validFor); + SetEntry(key, value, tags, validFor, entryId); } } else { - SetEntry(key, value, tags, validFor); + SetEntry(key, value, tags, validFor, entryId); } return ValueTask.CompletedTask; } - void SetEntry(string key, byte[] value, string[]? tags, TimeSpan validFor) + private void SetEntry(string key, byte[] value, string[]? 
tags, TimeSpan validFor, Guid entryId) { Debug.Assert(key != null); @@ -120,22 +123,25 @@ void SetEntry(string key, byte[] value, string[]? tags, TimeSpan validFor) Size = value.Length }; - if (tags != null && tags.Length > 0) + if (tags is { Length: > 0 }) { // Remove cache keys from tag lists when the entry is evicted - options.RegisterPostEvictionCallback(RemoveFromTags, tags); + options.RegisterPostEvictionCallback(RemoveFromTags, (tags, entryId)); } _cache.Set(key, value, options); } - void RemoveFromTags(object key, object? value, EvictionReason reason, object? state) + private void RemoveFromTags(object key, object? value, EvictionReason reason, object? state) { - var tags = state as string[]; + Debug.Assert(state != null); + + var (tags, entryId) = ((string[] Tags, Guid EntryId))state; Debug.Assert(tags != null); Debug.Assert(tags.Length > 0); Debug.Assert(key is string); + Debug.Assert(entryId != Guid.Empty); lock (_tagsLock) { @@ -143,7 +149,7 @@ void RemoveFromTags(object key, object? value, EvictionReason reason, object? st { if (_taggedEntries.TryGetValue(tag, out var tagged)) { - tagged.Remove((string)key); + tagged.Remove(new TaggedEntry((string)key, entryId)); // Remove the collection if there is no more keys in it if (tagged.Count == 0) @@ -154,4 +160,6 @@ void RemoveFromTags(object key, object? value, EvictionReason reason, object? st } } } + + private record TaggedEntry(string Key, Guid EntryId); } diff --git a/src/Middleware/OutputCaching/test/MemoryOutputCacheStoreTests.cs b/src/Middleware/OutputCaching/test/MemoryOutputCacheStoreTests.cs index e8c809911add..c1ad1d708f4b 100644 --- a/src/Middleware/OutputCaching/test/MemoryOutputCacheStoreTests.cs +++ b/src/Middleware/OutputCaching/test/MemoryOutputCacheStoreTests.cs @@ -197,6 +197,43 @@ public async Task ExpiredEntries_AreRemovedFromTags() Assert.Single(tag2s); } + [Fact] + public async Task ReplacedEntries_AreNotRemovedFromTags() + { + var testClock = new TestMemoryOptionsClock { UtcNow = DateTimeOffset.UtcNow }; + var cache = new MemoryCache(new MemoryCacheOptions { SizeLimit = 1000, Clock = testClock, ExpirationScanFrequency = TimeSpan.FromMilliseconds(1) }); + var store = new MemoryOutputCacheStore(cache); + var value = "abc"u8.ToArray(); + + await store.SetAsync("a", value, new[] { "tag1", "tag2" }, TimeSpan.FromMilliseconds(5), default); + await store.SetAsync("a", value, new[] { "tag1" }, TimeSpan.FromMilliseconds(20), default); + + testClock.Advance(TimeSpan.FromMilliseconds(10)); + + // Trigger background expiration by accessing the cache. + _ = cache.Get("a"); + + var resulta = await store.GetAsync("a", default); + + Assert.NotNull(resulta); + + HashSet tag1s, tag2s; + + // Wait for the tag2 HashSet to be removed by the background expiration thread. 
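Side note (illustrative sketch, not part of the patch): the TaggedEntry(Key, EntryId) record added above exists for the overwrite case this test exercises. Replacing a key with a different tag set registers a new entry id, so the eviction callback of the replaced entry removes only its own (key, id) pair and leaves the live entry's tag tracking intact. A compressed version of the scenario, using the same internal test types and assuming the test project context:

using Microsoft.Extensions.Caching.Memory;

var cache = new MemoryCache(new MemoryCacheOptions { SizeLimit = 1000 });
var store = new MemoryOutputCacheStore(cache);
var payload = "abc"u8.ToArray();

// The first entry tags "a" with tag1 and tag2; the replacement keeps only tag1.
await store.SetAsync("a", payload, new[] { "tag1", "tag2" }, TimeSpan.FromMilliseconds(5), default);
await store.SetAsync("a", payload, new[] { "tag1" }, TimeSpan.FromMinutes(1), default);

// When the replaced entry is evicted, only its own (key, id) pairs are dropped, so "tag2" tracking
// disappears while "tag1" still points at the live entry and can be used to evict it.
await store.EvictByTagAsync("tag1", default);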
+ + using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)); + + while (store.TaggedEntries.TryGetValue("tag2", out tag2s) && !cts.IsCancellationRequested) + { + await Task.Yield(); + } + + store.TaggedEntries.TryGetValue("tag1", out tag1s); + + Assert.Null(tag2s); + Assert.Single(tag1s); + } + [Theory] [InlineData(null)] public async Task Store_Throws_OnInvalidTag(string tag) diff --git a/src/Mvc/Mvc.ViewFeatures/src/HtmlAttributePropertyHelper.cs b/src/Mvc/Mvc.ViewFeatures/src/HtmlAttributePropertyHelper.cs index c1ac26b46742..f04f8895bd9a 100644 --- a/src/Mvc/Mvc.ViewFeatures/src/HtmlAttributePropertyHelper.cs +++ b/src/Mvc/Mvc.ViewFeatures/src/HtmlAttributePropertyHelper.cs @@ -25,8 +25,7 @@ public HtmlAttributePropertyHelper(PropertyHelper propertyHelper) /// /// Part of contract. /// - /// - public static void UpdateCache(Type _) + public static void ClearCache(Type[] _) { ReflectionCache.Clear(); } diff --git a/src/OpenApi/sample/Controllers/TestController.cs b/src/OpenApi/sample/Controllers/TestController.cs index cf1fed79abb2..26c47ba191d0 100644 --- a/src/OpenApi/sample/Controllers/TestController.cs +++ b/src/OpenApi/sample/Controllers/TestController.cs @@ -24,6 +24,15 @@ public IActionResult PostForm([FromForm] MvcTodo todo) return Ok(todo); } + [HttpGet] + [Produces("application/json")] + [ProducesResponseType(typeof(CurrentWeather), 200)] + [Route("/getcultureinvariant")] + public IActionResult GetCurrentWeather() + { + return Ok(new CurrentWeather(1.0f)); + } + public class RouteParamsContainer { [FromRoute] @@ -36,4 +45,6 @@ public class RouteParamsContainer } public record MvcTodo(string Title, string Description, bool IsCompleted); + + public record CurrentWeather([Range(-100.5f, 100.5f)] float Temperature = 0.1f); } diff --git a/src/OpenApi/sample/EndpointRouteBuilderExtensions.cs b/src/OpenApi/sample/EndpointRouteBuilderExtensions.cs index fd196d7fc101..acf7ae1bd41d 100644 --- a/src/OpenApi/sample/EndpointRouteBuilderExtensions.cs +++ b/src/OpenApi/sample/EndpointRouteBuilderExtensions.cs @@ -43,4 +43,92 @@ public static IEndpointConventionBuilder MapSwaggerUi(this IEndpointRouteBuilder """, "text/html")).ExcludeFromDescription(); } + + public static IEndpointRouteBuilder MapTypesWithRef(this IEndpointRouteBuilder endpoints) + { + endpoints.MapPost("/category", (Category category) => + { + return Results.Ok(category); + }); + endpoints.MapPost("/container", (ContainerType container) => + { + return Results.Ok(container); + }); + endpoints.MapPost("/root", (Root root) => + { + return Results.Ok(root); + }); + endpoints.MapPost("/location", (LocationContainer location) => + { + return Results.Ok(location); + }); + endpoints.MapPost("/parent", (ParentObject parent) => + { + return Results.Ok(parent); + }); + endpoints.MapPost("/child", (ChildObject child) => + { + return Results.Ok(child); + }); + return endpoints; + } + + public sealed class Category + { + public required string Name { get; set; } + + public required Category Parent { get; set; } + + public IEnumerable Tags { get; set; } = []; + } + + public sealed class Tag + { + public required string Name { get; set; } + } + + public sealed class ContainerType + { + public List> Seq1 { get; set; } = []; + public List> Seq2 { get; set; } = []; + } + + public sealed class Root + { + public Item Item1 { get; set; } = null!; + public Item Item2 { get; set; } = null!; + } + + public sealed class Item + { + public string[] Name { get; set; } = null!; + public int value { get; set; } + } + + public sealed class 
LocationContainer + { + public required LocationDto Location { get; set; } + } + + public sealed class LocationDto + { + public required AddressDto Address { get; set; } + } + + public sealed class AddressDto + { + public required LocationDto RelatedLocation { get; set; } + } + + public sealed class ParentObject + { + public int Id { get; set; } + public List Children { get; set; } = []; + } + + public sealed class ChildObject + { + public int Id { get; set; } + public required ParentObject Parent { get; set; } + } } diff --git a/src/OpenApi/sample/Program.cs b/src/OpenApi/sample/Program.cs index a622780ff482..b0b0a5b053f0 100644 --- a/src/OpenApi/sample/Program.cs +++ b/src/OpenApi/sample/Program.cs @@ -3,6 +3,7 @@ using System.Collections.Immutable; using System.ComponentModel; +using System.Globalization; using Microsoft.AspNetCore.Http.HttpResults; using Microsoft.AspNetCore.Mvc; using Microsoft.OpenApi.Models; @@ -36,6 +37,32 @@ var app = builder.Build(); +// Run requests with a culture that uses commas to format decimals to +// verify the invariant culture is used to generate the OpenAPI document. +app.Use((next) => +{ + return async context => + { + var originalCulture = CultureInfo.CurrentCulture; + var originalUICulture = CultureInfo.CurrentUICulture; + + var newCulture = new CultureInfo("fr-FR"); + + try + { + CultureInfo.CurrentCulture = newCulture; + CultureInfo.CurrentUICulture = newCulture; + + await next(context); + } + finally + { + CultureInfo.CurrentCulture = originalCulture; + CultureInfo.CurrentUICulture = originalUICulture; + } + }; +}); + app.MapOpenApi(); if (app.Environment.IsDevelopment()) { @@ -113,6 +140,7 @@ schemas.MapPost("/shape", (Shape shape) => { }); schemas.MapPost("/weatherforecastbase", (WeatherForecastBase forecast) => { }); schemas.MapPost("/person", (Person person) => { }); +schemas.MapTypesWithRef(); app.MapControllers(); diff --git a/src/OpenApi/src/Comparers/OpenApiSchemaComparer.cs b/src/OpenApi/src/Comparers/OpenApiSchemaComparer.cs index 0591035d2f47..46f91cd8a494 100644 --- a/src/OpenApi/src/Comparers/OpenApiSchemaComparer.cs +++ b/src/OpenApi/src/Comparers/OpenApiSchemaComparer.cs @@ -24,6 +24,36 @@ public bool Equals(OpenApiSchema? x, OpenApiSchema? 
y) return true; } + // If both have references, compare the final segments to handle + // equivalent types in different contexts, like the same schema + // in a dictionary value or list like "#/components/schemas/#/additionalProperties/properties/location/properties/address" + if (x.Reference != null && y.Reference != null) + { + if (x.Reference.Id.StartsWith("#", StringComparison.OrdinalIgnoreCase) && + y.Reference.Id.StartsWith("#", StringComparison.OrdinalIgnoreCase) && + x.Reference.ReferenceV3 is string xFullReferencePath && + y.Reference.ReferenceV3 is string yFullReferencePath) + { + // Compare the last segments of the reference paths + // to handle equivalent types in different contexts, + // like the same schema in a dictionary value or list + var xLastIndexOf = xFullReferencePath.LastIndexOf('/'); + var yLastIndexOf = yFullReferencePath.LastIndexOf('/'); + + if (xLastIndexOf != -1 && yLastIndexOf != -1) + { + return xFullReferencePath.AsSpan(xLastIndexOf).Equals(yFullReferencePath.AsSpan(yLastIndexOf), StringComparison.OrdinalIgnoreCase); + } + } + } + + // If only one has a reference, compare using schema IDs + if ((x.Reference != null && y.Reference == null) + || (x.Reference == null && y.Reference != null)) + { + return SchemaIdEquals(x, y); + } + // Compare property equality in an order that should help us find inequality faster return x.Type == y.Type && diff --git a/src/OpenApi/src/Extensions/OpenApiEndpointRouteBuilderExtensions.cs b/src/OpenApi/src/Extensions/OpenApiEndpointRouteBuilderExtensions.cs index c5bed38669e4..0533e4f6029f 100644 --- a/src/OpenApi/src/Extensions/OpenApiEndpointRouteBuilderExtensions.cs +++ b/src/OpenApi/src/Extensions/OpenApiEndpointRouteBuilderExtensions.cs @@ -43,10 +43,11 @@ public static IEndpointConventionBuilder MapOpenApi(this IEndpointRouteBuilder e } else { - var document = await documentService.GetOpenApiDocumentAsync(context.RequestServices, context.RequestAborted); + var document = await documentService.GetOpenApiDocumentAsync(context.RequestServices, context.Request, context.RequestAborted); var documentOptions = options.Get(documentName); using var output = MemoryBufferWriter.Get(); - using var writer = Utf8BufferTextWriter.Get(output); + using var writer = new Utf8BufferTextWriter(System.Globalization.CultureInfo.InvariantCulture); + writer.SetWriter(output); try { document.Serialize(new OpenApiJsonWriter(writer), documentOptions.OpenApiVersion); diff --git a/src/OpenApi/src/Schemas/OpenApiJsonSchema.Helpers.cs b/src/OpenApi/src/Schemas/OpenApiJsonSchema.Helpers.cs index 830b0375d396..f05c595b5c2b 100644 --- a/src/OpenApi/src/Schemas/OpenApiJsonSchema.Helpers.cs +++ b/src/OpenApi/src/Schemas/OpenApiJsonSchema.Helpers.cs @@ -136,6 +136,8 @@ internal sealed partial class OpenApiJsonSchema { type = "array"; var array = new OpenApiArray(); + // Read to process JsonTokenType.StartArray before advancing + reader.Read(); while (reader.TokenType != JsonTokenType.EndArray) { array.Add(ReadOpenApiAny(ref reader)); diff --git a/src/OpenApi/src/Services/OpenApiDocumentService.cs b/src/OpenApi/src/Services/OpenApiDocumentService.cs index a9a7ecf6a1a6..b907cc6ecb20 100644 --- a/src/OpenApi/src/Services/OpenApiDocumentService.cs +++ b/src/OpenApi/src/Services/OpenApiDocumentService.cs @@ -14,6 +14,7 @@ using Microsoft.AspNetCore.Hosting.Server; using Microsoft.AspNetCore.Hosting.Server.Features; using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.Extensions; using Microsoft.AspNetCore.Http.Metadata; using Microsoft.AspNetCore.Mvc; 
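Side note (illustrative sketch, not part of the patch): the sample middleware above forces fr-FR for every request, and the endpoint now serializes through a writer pinned to the invariant culture, because culture-sensitive formatting would corrupt numeric JSON values in the generated document. The snippet below only demonstrates the formatting difference; it is not code from the change.

using System;
using System.Globalization;

var temperature = 0.1f;
CultureInfo.CurrentCulture = new CultureInfo("fr-FR");

Console.WriteLine(temperature.ToString());                             // "0,1" - not valid JSON
Console.WriteLine(temperature.ToString(CultureInfo.InvariantCulture)); // "0.1" - what the document needs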
using Microsoft.AspNetCore.Mvc.ApiExplorer; @@ -55,7 +56,7 @@ internal sealed class OpenApiDocumentService( internal bool TryGetCachedOperationTransformerContext(string descriptionId, [NotNullWhen(true)] out OpenApiOperationTransformerContext? context) => _operationTransformerContextCache.TryGetValue(descriptionId, out context); - public async Task GetOpenApiDocumentAsync(IServiceProvider scopedServiceProvider, CancellationToken cancellationToken = default) + public async Task GetOpenApiDocumentAsync(IServiceProvider scopedServiceProvider, HttpRequest? httpRequest = null, CancellationToken cancellationToken = default) { // For good hygiene, operation-level tags must also appear in the document-level // tags collection. This set captures all tags that have been seen so far. @@ -74,7 +75,7 @@ public async Task GetOpenApiDocumentAsync(IServiceProvider scop { Info = GetOpenApiInfo(), Paths = await GetOpenApiPathsAsync(capturedTags, scopedServiceProvider, operationTransformers, schemaTransformers, cancellationToken), - Servers = GetOpenApiServers(), + Servers = GetOpenApiServers(httpRequest), Tags = [.. capturedTags] }; try @@ -192,12 +193,26 @@ internal OpenApiInfo GetOpenApiInfo() }; } - internal List GetOpenApiServers() + // Resolve server URL from the request to handle reverse proxies. + // If there is active request object, assume a development environment and use the server addresses. + internal List GetOpenApiServers(HttpRequest? httpRequest = null) + { + if (httpRequest is not null) + { + var serverUrl = UriHelper.BuildAbsolute(httpRequest.Scheme, httpRequest.Host, httpRequest.PathBase); + return [new OpenApiServer { Url = serverUrl }]; + } + else + { + return GetDevelopmentOpenApiServers(); + } + } + private List GetDevelopmentOpenApiServers() { if (hostEnvironment.IsDevelopment() && server?.Features.Get()?.Addresses is { Count: > 0 } addresses) { - return addresses.Select(address => new OpenApiServer { Url = address }).ToList(); + return [.. 
addresses.Select(address => new OpenApiServer { Url = address })]; } return []; } @@ -411,7 +426,7 @@ private async Task GetResponseAsync( "Query" => ParameterLocation.Query, "Header" => ParameterLocation.Header, "Path" => ParameterLocation.Path, - _ => throw new InvalidOperationException($"Unsupported parameter source: {parameter.Source.Id}") + _ => ParameterLocation.Query }, Required = IsRequired(parameter), Schema = await _componentService.GetOrCreateSchemaAsync(GetTargetType(description, parameter), scopedServiceProvider, schemaTransformers, parameter, cancellationToken: cancellationToken), diff --git a/src/OpenApi/src/Services/Schemas/OpenApiSchemaService.cs b/src/OpenApi/src/Services/Schemas/OpenApiSchemaService.cs index d7ea158b919a..812f896ee25d 100644 --- a/src/OpenApi/src/Services/Schemas/OpenApiSchemaService.cs +++ b/src/OpenApi/src/Services/Schemas/OpenApiSchemaService.cs @@ -32,7 +32,7 @@ internal sealed class OpenApiSchemaService( IOptionsMonitor optionsMonitor) { private readonly OpenApiSchemaStore _schemaStore = serviceProvider.GetRequiredKeyedService(documentName); - private readonly OpenApiJsonSchemaContext _jsonSchemaContext = new OpenApiJsonSchemaContext(new(jsonOptions.Value.SerializerOptions)); + private readonly OpenApiJsonSchemaContext _jsonSchemaContext = new(new(jsonOptions.Value.SerializerOptions)); private readonly JsonSerializerOptions _jsonSerializerOptions = new(jsonOptions.Value.SerializerOptions) { // In order to properly handle the `RequiredAttribute` on type properties, add a modifier to support @@ -102,7 +102,7 @@ internal sealed class OpenApiSchemaService( // "nested": "#/properties/nested" becomes "nested": "#/components/schemas/NestedType" if (jsonPropertyInfo.PropertyType == jsonPropertyInfo.DeclaringType) { - return new JsonObject { [OpenApiSchemaKeywords.RefKeyword] = context.TypeInfo.GetSchemaReferenceId() }; + schema[OpenApiSchemaKeywords.RefKeyword] = createSchemaReferenceId(context.TypeInfo); } schema.ApplyNullabilityContextInfo(jsonPropertyInfo); } @@ -212,8 +212,119 @@ private async Task InnerApplySchemaTransformersAsync(OpenApiSchema schema, } } } + + if (schema is { AdditionalPropertiesAllowed: true, AdditionalProperties: not null } && jsonTypeInfo.ElementType is not null) + { + var elementTypeInfo = _jsonSerializerOptions.GetTypeInfo(jsonTypeInfo.ElementType); + await InnerApplySchemaTransformersAsync(schema.AdditionalProperties, elementTypeInfo, null, context, transformer, cancellationToken); + } } private JsonNode CreateSchema(OpenApiSchemaKey key) - => JsonSchemaExporter.GetJsonSchemaAsNode(_jsonSerializerOptions, key.Type, _configuration); + { + var sourceSchema = JsonSchemaExporter.GetJsonSchemaAsNode(_jsonSerializerOptions, key.Type, _configuration); + + // Resolve any relative references in the schema + ResolveRelativeReferences(sourceSchema, sourceSchema); + + return sourceSchema; + } + + // Helper method to recursively resolve relative references in a schema + private static void ResolveRelativeReferences(JsonNode node, JsonNode rootNode) + { + if (node is JsonObject jsonObj) + { + // Check if this node has a $ref property with a relative reference and no schemaId to + // resolve to + if (jsonObj.TryGetPropertyValue(OpenApiSchemaKeywords.RefKeyword, out var refNode) && + refNode is JsonValue refValue && + refValue.TryGetValue(out var refPath) && + refPath.StartsWith("#/", StringComparison.OrdinalIgnoreCase) && + !jsonObj.TryGetPropertyValue(OpenApiConstants.SchemaId, out var schemaId) && + schemaId is null) + { + // Found a 
relative reference, resolve it + var resolvedNode = ResolveJsonPointer(rootNode, refPath); + if (resolvedNode != null) + { + // Copy all properties from the resolved node + if (resolvedNode is JsonObject resolvedObj) + { + foreach (var property in resolvedObj) + { + // Clone the property value to avoid modifying the original + var clonedValue = property.Value != null + ? JsonNode.Parse(property.Value.ToJsonString()) + : null; + + jsonObj[property.Key] = clonedValue; + } + } + } + } + else + { + // Recursively process all properties + foreach (var property in jsonObj) + { + if (property.Value is JsonNode propNode) + { + ResolveRelativeReferences(propNode, rootNode); + } + } + } + } + else if (node is JsonArray jsonArray) + { + // Process each item in the array + for (var i = 0; i < jsonArray.Count; i++) + { + if (jsonArray[i] is JsonNode arrayItem) + { + ResolveRelativeReferences(arrayItem, rootNode); + } + } + } + } + + // Helper method to resolve a JSON pointer path and return the referenced node + private static JsonNode? ResolveJsonPointer(JsonNode root, string pointer) + { + if (string.IsNullOrEmpty(pointer) || !pointer.StartsWith("#/", StringComparison.OrdinalIgnoreCase)) + { + return null; // Invalid pointer + } + + // Remove the leading "#/" and split the path into segments + var jsonPointer = pointer.AsSpan(2); + var segments = jsonPointer.Split('/'); + var currentNode = root; + + foreach (var segment in segments) + { + if (currentNode is JsonObject jsonObj) + { + if (!jsonObj.TryGetPropertyValue(jsonPointer[segment].ToString(), out var nextNode)) + { + return null; // Path segment not found + } + currentNode = nextNode; + } + else if (currentNode is JsonArray jsonArray && int.TryParse(jsonPointer[segment], out var index)) + { + if (index < 0 || index >= jsonArray.Count) + { + return null; // Index out of range + } + currentNode = jsonArray[index]; + } + else + { + return null; // Cannot navigate further + } + } + + return currentNode; + } } diff --git a/src/OpenApi/src/Transformers/Implementations/OpenApiSchemaReferenceTransformer.cs b/src/OpenApi/src/Transformers/Implementations/OpenApiSchemaReferenceTransformer.cs index ee7e166daab7..aa98a21894ff 100644 --- a/src/OpenApi/src/Transformers/Implementations/OpenApiSchemaReferenceTransformer.cs +++ b/src/OpenApi/src/Transformers/Implementations/OpenApiSchemaReferenceTransformer.cs @@ -112,6 +112,13 @@ public Task TransformAsync(OpenApiDocument document, OpenApiDocumentTransformerC return new OpenApiSchema { Reference = new OpenApiReference { Type = ReferenceType.Schema, Id = schemaId?.ToString() } }; } + // Handle relative schemas that don't point to the parent document but to another property in the same type. + // In this case, remove the reference and rely on the properties that have been resolved and copied by the OpenApiSchemaService. 
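Side note (illustrative sketch, not part of the patch): the comment above describes schemas whose "$ref" is a relative pointer into the same exported document; the ResolveRelativeReferences/ResolveJsonPointer helpers earlier in this patch copy the pointer target's members in place so the reference transformer can then drop the "$ref". The JSON below is only a hypothetical input shape for that resolution step.

using System.Text.Json.Nodes;

var exported = JsonNode.Parse("""
{
  "properties": {
    "location": { "properties": { "address": { "type": "string" } } },
    "backupLocation": { "$ref": "#/properties/location" }
  }
}
""")!;
// After resolution, "backupLocation" holds a copy of the "#/properties/location" members
// instead of a dangling relative reference.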
+ if (schema.Reference is { Type: ReferenceType.Schema, Id: var id } && id.StartsWith("#/", StringComparison.Ordinal)) + { + schema.Reference = null; + } + if (schema.AllOf is not null) { for (var i = 0; i < schema.AllOf.Count; i++) diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/OpenApiDocumentIntegrationTests.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/OpenApiDocumentIntegrationTests.cs index 37ebf3c26f06..9c0f540bee3a 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/OpenApiDocumentIntegrationTests.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/OpenApiDocumentIntegrationTests.cs @@ -1,12 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. -using System.Globalization; using Microsoft.AspNetCore.InternalTesting; -using Microsoft.AspNetCore.OpenApi; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.OpenApi.Models; -using Microsoft.OpenApi.Writers; [UsesVerify] public sealed class OpenApiDocumentIntegrationTests(SampleAppFixture fixture) : IClassFixture @@ -20,21 +15,12 @@ public sealed class OpenApiDocumentIntegrationTests(SampleAppFixture fixture) : [InlineData("schemas-by-ref")] public async Task VerifyOpenApiDocument(string documentName) { - var documentService = fixture.Services.GetRequiredKeyedService(documentName); - var scopedServiceProvider = fixture.Services.CreateScope(); - var document = await documentService.GetOpenApiDocumentAsync(scopedServiceProvider.ServiceProvider); - await Verifier.Verify(GetOpenApiJson(document)) + using var client = fixture.CreateClient(); + var json = await client.GetStringAsync($"/openapi/{documentName}.json"); + await Verify(json) .UseDirectory(SkipOnHelixAttribute.OnHelix() ? 
Path.Combine(Environment.GetEnvironmentVariable("HELIX_WORKITEM_ROOT"), "Integration", "snapshots") : "snapshots") .UseParameters(documentName); } - - private static string GetOpenApiJson(OpenApiDocument document) - { - using var textWriter = new StringWriter(CultureInfo.InvariantCulture); - var jsonWriter = new OpenApiJsonWriter(textWriter); - document.SerializeAsV3(jsonWriter); - return textWriter.ToString(); - } } diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=controllers.verified.txt b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=controllers.verified.txt index 5f8abe054fd2..3fe8eab4c666 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=controllers.verified.txt +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=controllers.verified.txt @@ -4,6 +4,11 @@ "title": "Sample | controllers", "version": "1.0.0" }, + "servers": [ + { + "url": "http://localhost/" + } + ], "paths": { "/getbyidandname/{id}/{name}": { "get": { @@ -88,9 +93,41 @@ } } } + }, + "/getcultureinvariant": { + "get": { + "tags": [ + "Test" + ], + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CurrentWeather" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "CurrentWeather": { + "type": "object", + "properties": { + "temperature": { + "type": "number", + "format": "float", + "default": 0.1 + } + } + } } }, - "components": { }, "tags": [ { "name": "Test" diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=forms.verified.txt b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=forms.verified.txt index 3e341cabab82..63632cfb7642 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=forms.verified.txt +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=forms.verified.txt @@ -4,6 +4,11 @@ "title": "Sample | forms", "version": "1.0.0" }, + "servers": [ + { + "url": "http://localhost/" + } + ], "paths": { "/forms/form-file": { "post": { diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=responses.verified.txt b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=responses.verified.txt index 12fb88cb35e6..984b97169553 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=responses.verified.txt +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=responses.verified.txt @@ -4,6 +4,11 @@ "title": "Sample | responses", "version": "1.0.0" }, + "servers": [ + { + "url": 
"http://localhost/" + } + ], "paths": { "/responses/200-add-xml": { "get": { diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=schemas-by-ref.verified.txt b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=schemas-by-ref.verified.txt index cd00d261b632..ff07dc9d8b1b 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=schemas-by-ref.verified.txt +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=schemas-by-ref.verified.txt @@ -4,6 +4,11 @@ "title": "Sample | schemas-by-ref", "version": "1.0.0" }, + "servers": [ + { + "url": "http://localhost/" + } + ], "paths": { "/schemas-by-ref/typed-results": { "get": { @@ -375,6 +380,138 @@ } } } + }, + "/schemas-by-ref/category": { + "post": { + "tags": [ + "Sample" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Category" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/schemas-by-ref/container": { + "post": { + "tags": [ + "Sample" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ContainerType" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/schemas-by-ref/root": { + "post": { + "tags": [ + "Sample" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Root" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/schemas-by-ref/location": { + "post": { + "tags": [ + "Sample" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LocationContainer" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/schemas-by-ref/parent": { + "post": { + "tags": [ + "Sample" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ParentObject" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/schemas-by-ref/child": { + "post": { + "tags": [ + "Sample" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChildObject" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "OK" + } + } + } } }, "components": { @@ -391,6 +528,128 @@ } } }, + "AddressDto": { + "required": [ + "relatedLocation" + ], + "type": "object", + "properties": { + "relatedLocation": { + "$ref": "#/components/schemas/LocationDto" + } + } + }, + "Category": { + "required": [ + "name", + "parent" + ], + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "parent": { + "$ref": "#/components/schemas/Category" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Tag" + } + } + } + }, + "ChildObject": { + "required": [ + "parent" + ], + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int32" + }, + "parent": { + "$ref": "#/components/schemas/ParentObject" + } + } + }, + "ContainerType": { + "type": 
"object", + "properties": { + "seq1": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "seq2": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "Item": { + "type": "object", + "properties": { + "name": { + "type": "array", + "items": { + "type": "string" + } + }, + "value": { + "type": "integer", + "format": "int32" + } + } + }, + "LocationContainer": { + "required": [ + "location" + ], + "type": "object", + "properties": { + "location": { + "$ref": "#/components/schemas/LocationDto" + } + } + }, + "LocationDto": { + "required": [ + "address" + ], + "type": "object", + "properties": { + "address": { + "$ref": "#/components/schemas/AddressDto" + } + } + }, + "ParentObject": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int32" + }, + "children": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ChildObject" + } + } + } + }, "Person": { "required": [ "discriminator" @@ -454,6 +713,17 @@ } } }, + "Root": { + "type": "object", + "properties": { + "item1": { + "$ref": "#/components/schemas/Item" + }, + "item2": { + "$ref": "#/components/schemas/Item" + } + } + }, "Shape": { "required": [ "$type" @@ -517,6 +787,17 @@ } } }, + "Tag": { + "required": [ + "name" + ], + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + }, "Triangle": { "type": "object", "properties": { diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v1.verified.txt b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v1.verified.txt index 96ce428e5d17..ef793985d68c 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v1.verified.txt +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v1.verified.txt @@ -4,6 +4,11 @@ "title": "Sample | v1", "version": "1.0.0" }, + "servers": [ + { + "url": "http://localhost/" + } + ], "paths": { "/v1/array-of-guids": { "get": { diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v2.verified.txt b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v2.verified.txt index b3d4fa31bff9..c0749f035ec3 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v2.verified.txt +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Integration/snapshots/OpenApiDocumentIntegrationTests.VerifyOpenApiDocument_documentName=v2.verified.txt @@ -11,6 +11,11 @@ }, "version": "1.0.0" }, + "servers": [ + { + "url": "http://localhost/" + } + ], "paths": { "/v2/users": { "get": { diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Parameters.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Parameters.cs index 10c65ae2787f..999283022706 100644 --- 
a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Parameters.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Parameters.cs @@ -4,6 +4,7 @@ using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.ModelBinding; using Microsoft.OpenApi.Models; public partial class OpenApiDocumentServiceTests : OpenApiDocumentServiceTestBase @@ -190,4 +191,29 @@ await VerifyOpenApiDocument(builder, document => Assert.Null(document.Paths["/api/content-type-lower"].Operations[OperationType.Get].Parameters); }); } + + [Fact] + public async Task GetOpenApiParameters_ToleratesCustomBindingSource() + { + var action = CreateActionDescriptor(nameof(ActionWithCustomBinder)); + + await VerifyOpenApiDocument(action, document => + { + var operation = document.Paths["/custom-binding"].Operations[OperationType.Get]; + var parameter = Assert.Single(operation.Parameters); + Assert.Equal("model", parameter.Name); + Assert.Equal(ParameterLocation.Query, parameter.In); + }); + } + + [Route("/custom-binding")] + private void ActionWithCustomBinder([ModelBinder(BinderType = typeof(CustomBinder))] Todo model) { } + + public class CustomBinder : IModelBinder + { + public Task BindModelAsync(ModelBindingContext bindingContext) + { + return Task.CompletedTask; + } + } } diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Servers.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Servers.cs index c84c7e258510..1bc247c95ad4 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Servers.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentService/OpenApiDocumentServiceTests.Servers.cs @@ -1,6 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
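The servers tests that follow (and the updated snapshots above) exercise how the document service derives the single "servers" entry from the incoming request. A minimal sketch of that derivation, assuming the URL is composed from the request's scheme, host, and path base exactly as the Theory data implies; ServerFromRequest is an illustrative helper, not the service's actual API:

using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.OpenApi.Models;

static OpenApiServer ServerFromRequest(HttpRequest request) => new()
{
    // "http"  + "localhost:5001" + ""     => "http://localhost:5001/"
    // "https" + "example.com"    + "/api" => "https://example.com/api"
    Url = UriHelper.BuildAbsolute(request.Scheme, request.Host, request.PathBase)
};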
+using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc.ApiExplorer; using Microsoft.AspNetCore.OpenApi; using Microsoft.Extensions.DependencyInjection; @@ -10,6 +11,45 @@ public partial class OpenApiDocumentServiceTests { + [Theory] + [InlineData("Development", "localhost:5001", "", "http", "http://localhost:5001/")] + [InlineData("Development", "example.com", "/api", "https", "https://example.com/api")] + [InlineData("Staging", "localhost:5002", "/v1", "http", "http://localhost:5002/v1")] + [InlineData("Staging", "api.example.com", "/base/path", "https", "https://api.example.com/base/path")] + [InlineData("Development", "localhost", "/", "http", "http://localhost/")] + public void GetOpenApiServers_FavorsHttpContextRequestOverServerAddress(string environment, string host, string pathBase, string scheme, string expectedUri) + { + // Arrange + var hostEnvironment = new HostingEnvironment + { + ApplicationName = "TestApplication", + EnvironmentName = environment + }; + var docService = new OpenApiDocumentService( + "v1", + new Mock().Object, + hostEnvironment, + GetMockOptionsMonitor(), + new Mock().Object, + new OpenApiTestServer(["http://localhost:5000"])); + var httpContext = new DefaultHttpContext() + { + Request = + { + Host = new HostString(host), + PathBase = pathBase, + Scheme = scheme + + } + }; + + // Act + var servers = docService.GetOpenApiServers(httpContext.Request); + + // Assert + Assert.Contains(expectedUri, servers.Select(s => s.Url)); + } + [Fact] public void GetOpenApiServers_HandlesServerAddressFeatureWithValues() { diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentServiceTestsBase.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentServiceTestsBase.cs index e773ebf5ff89..b33eb153de4c 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentServiceTestsBase.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiDocumentServiceTestsBase.cs @@ -35,16 +35,16 @@ public static async Task VerifyOpenApiDocument(IEndpointRouteBuilder builder, Op { var documentService = CreateDocumentService(builder, openApiOptions); var scopedService = ((TestServiceProvider)builder.ServiceProvider).CreateScope(); - var document = await documentService.GetOpenApiDocumentAsync(scopedService.ServiceProvider, cancellationToken); + var document = await documentService.GetOpenApiDocumentAsync(scopedService.ServiceProvider, null, cancellationToken); verifyOpenApiDocument(document); } - public static async Task VerifyOpenApiDocument(ActionDescriptor action, Action verifyOpenApiDocument) + public static async Task VerifyOpenApiDocument(ActionDescriptor action, Action verifyOpenApiDocument, CancellationToken cancellationToken = default) { var builder = CreateBuilder(); var documentService = CreateDocumentService(builder, action); var scopedService = ((TestServiceProvider)builder.ServiceProvider).CreateScope(); - var document = await documentService.GetOpenApiDocumentAsync(scopedService.ServiceProvider); + var document = await documentService.GetOpenApiDocumentAsync(scopedService.ServiceProvider, null); verifyOpenApiDocument(document); } diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiSchemaService/OpenApiSchemaService.ParameterSchemas.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiSchemaService/OpenApiSchemaService.ParameterSchemas.cs index 1c7cb1ba5746..6f87374b1c92 100644 --- 
a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiSchemaService/OpenApiSchemaService.ParameterSchemas.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Services/OpenApiSchemaService/OpenApiSchemaService.ParameterSchemas.cs @@ -722,4 +722,93 @@ public static bool TryParse(string value, out Student result) return true; } } + + // Regression test for https://github.com/dotnet/aspnetcore/issues/62023 + // Testing that the array parsing in our OpenApiJsonSchema works + [Fact] + public async Task CustomConverterThatOutputsArrayWithDefaultValue() + { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.ConfigureHttpJsonOptions(options => + { + options.SerializerOptions.Converters.Add(new EnumArrayTypeConverter()); + }); + var builder = CreateBuilder(serviceCollection); + + // Act + builder.MapPost("/api", (EnumArrayType e = EnumArrayType.None) => { }); + + // Assert + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/api"].Operations[OperationType.Post]; + var param = Assert.Single(operation.Parameters); + Assert.NotNull(param.Schema); + Assert.IsType(param.Schema.Default); + // Type is null, it's up to the user to configure this via a custom schema + // transformer for types with a converter. + Assert.Null(param.Schema.Type); + }); + } + + [Fact] + public async Task CustomConverterThatOutputsObjectWithDefaultValue() + { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.ConfigureHttpJsonOptions(options => + { + options.SerializerOptions.Converters.Add(new EnumObjectTypeConverter()); + }); + var builder = CreateBuilder(serviceCollection); + + // Act + builder.MapPost("/api", (EnumArrayType e = EnumArrayType.None) => { }); + + // Assert + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/api"].Operations[OperationType.Post]; + var param = Assert.Single(operation.Parameters); + Assert.NotNull(param.Schema); + Assert.IsType(param.Schema.Default); + // Type is null, it's up to the user to configure this via a custom schema + // transformer for types with a converter. 
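The comment in the converter tests above notes that when a parameter type has a custom JsonConverter, the generated schema's Type is left null and the user is expected to supply it. A minimal sketch of doing that with a schema transformer, assuming the user-facing AddOpenApi registration; the choice of "array" simply mirrors what EnumArrayTypeConverter writes:

builder.Services.AddOpenApi(options =>
{
    options.AddSchemaTransformer((schema, context, cancellationToken) =>
    {
        // EnumArrayTypeConverter serializes the value as a JSON array, so report it as one.
        if (context.JsonTypeInfo.Type == typeof(EnumArrayType))
        {
            schema.Type = "array";
        }
        return Task.CompletedTask;
    });
});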
+ Assert.Null(param.Schema.Type); + }); + } + + public enum EnumArrayType + { + None = 1 + } + + public class EnumArrayTypeConverter : JsonConverter + { + public override EnumArrayType Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + return new EnumArrayType(); + } + + public override void Write(Utf8JsonWriter writer, EnumArrayType value, JsonSerializerOptions options) + { + writer.WriteStartArray(); + writer.WriteEndArray(); + } + } + + public class EnumObjectTypeConverter : JsonConverter + { + public override EnumArrayType Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + return new EnumArrayType(); + } + + public override void Write(Utf8JsonWriter writer, EnumArrayType value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + writer.WriteEndObject(); + } + } } diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/Implementations/OpenApiSchemaReferenceTransformerTests.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/Implementations/OpenApiSchemaReferenceTransformerTests.cs index 4d16ff51d4e7..f8d46f771ca1 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/Implementations/OpenApiSchemaReferenceTransformerTests.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/Implementations/OpenApiSchemaReferenceTransformerTests.cs @@ -477,4 +477,336 @@ await VerifyOpenApiDocument(builder, options, document => Assert.Equal(ReferenceType.Link, responseSchema.Reference.Type); }); } + + [Fact] + public async Task SupportsNestedSchemasWithSelfReference() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/", (LocationContainer item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("LocationContainer", requestSchema.Reference.Id); + + // Assert that $ref is used for nested LocationDto + var locationContainerSchema = requestSchema.GetEffective(document); + Assert.Equal("LocationDto", locationContainerSchema.Properties["location"].Reference.Id); + + // Assert that $ref is used for nested AddressDto + var locationSchema = locationContainerSchema.Properties["location"].GetEffective(document); + Assert.Equal("AddressDto", locationSchema.Properties["address"].Reference.Id); + + // Assert that $ref is used for related LocationDto + var addressSchema = locationSchema.Properties["address"].GetEffective(document); + Assert.Equal("LocationDto", addressSchema.Properties["relatedLocation"].Reference.Id); + + // Assert that only expected schemas are generated at the top-level + Assert.Equal(["AddressDto", "LocationContainer", "LocationDto"], document.Components.Schemas.Keys); + }); + } + + [Fact] + public async Task SupportsListOfNestedSchemasWithSelfReference() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/list", (List items) => { }); + builder.MapPost("/array", (LocationContainer[] items) => { }); + builder.MapPost("/dictionary", (Dictionary items) => { }); + builder.MapPost("/", (LocationContainer item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var listOperation = document.Paths["/list"].Operations[OperationType.Post]; + var listRequestSchema = listOperation.RequestBody.Content["application/json"].Schema; + + var arrayOperation = 
document.Paths["/array"].Operations[OperationType.Post]; + var arrayRequestSchema = arrayOperation.RequestBody.Content["application/json"].Schema; + + var dictionaryOperation = document.Paths["/dictionary"].Operations[OperationType.Post]; + var dictionaryRequestSchema = dictionaryOperation.RequestBody.Content["application/json"].Schema; + + var operation = document.Paths["/"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("LocationContainer", listRequestSchema.Items.Reference.Id); + Assert.Equal("LocationContainer", arrayRequestSchema.Items.Reference.Id); + Assert.Equal("LocationContainer", dictionaryRequestSchema.AdditionalProperties.Reference.Id); + Assert.Equal("LocationContainer", requestSchema.Reference.Id); + + // Assert that $ref is used for nested LocationDto + var locationContainerSchema = requestSchema.GetEffective(document); + Assert.Equal("LocationDto", locationContainerSchema.Properties["location"].Reference.Id); + + // Assert that $ref is used for nested AddressDto + var locationSchema = locationContainerSchema.Properties["location"].GetEffective(document); + Assert.Equal("AddressDto", locationSchema.Properties["address"].Reference.Id); + + // Assert that $ref is used for related LocationDto + var addressSchema = locationSchema.Properties["address"].GetEffective(document); + Assert.Equal("LocationDto", addressSchema.Properties["relatedLocation"].Reference.Id); + + // Assert that only expected schemas are generated at the top-level + Assert.Equal(3, document.Components.Schemas.Count); + Assert.Equal(["AddressDto", "LocationContainer", "LocationDto"], document.Components.Schemas.Keys); + }); + } + + [Fact] + public async Task SupportsListNestedSchemasWithSelfReference() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/", (ParentObject item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("ParentObject", requestSchema.Reference.Id); + + // Assert that $ref is used for nested Children + var parentSchema = requestSchema.GetEffective(document); + Assert.Equal("ChildObject", parentSchema.Properties["children"].Items.Reference.Id); + + // Assert that $ref is used for nested Parent + var childSchema = parentSchema.Properties["children"].Items.GetEffective(document); + Assert.Equal("ParentObject", childSchema.Properties["parent"].Reference.Id); + + // Assert that only the expected schemas are registered + Assert.Equal(["ChildObject", "ParentObject"], document.Components.Schemas.Keys); + }); + } + + [Fact] + public async Task SupportsMultiplePropertiesWithSameType() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/", (Root item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("Root", requestSchema.Reference.Id); + + // Assert that $ref is used for nested Item1 + var rootSchema = requestSchema.GetEffective(document); + Assert.Equal("Item", rootSchema.Properties["item1"].Reference.Id); + + // Assert that $ref is used for nested Item2 + Assert.Equal("Item", 
rootSchema.Properties["item2"].Reference.Id); + }); + } + + // Test for: https://github.com/dotnet/aspnetcore/issues/60381 + [Fact] + public async Task ResolvesListBasedReferencesCorrectly() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/", (ContainerType item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("ContainerType", requestSchema.Reference.Id); + + // Get effective schema for ContainerType + var containerSchema = requestSchema.GetEffective(document); + Assert.Equal(2, containerSchema.Properties.Count); + + // Check Seq1 and Seq2 properties + var seq1Schema = containerSchema.Properties["seq1"]; + var seq2Schema = containerSchema.Properties["seq2"]; + + // Assert both are array types + Assert.Equal("array", seq1Schema.Type); + Assert.Equal("array", seq2Schema.Type); + + // Assert items are arrays of strings + Assert.Equal("array", seq1Schema.Items.Type); + Assert.Equal("array", seq2Schema.Items.Type); + + // Since both Seq1 and Seq2 are the same type (List>), + // they should reference the same schema structure + Assert.Equal(seq1Schema.Items.Type, seq2Schema.Items.Type); + + // Verify the inner arrays contain strings + Assert.Equal("string", seq1Schema.Items.Items.Type); + Assert.Equal("string", seq2Schema.Items.Items.Type); + + Assert.Equal(["ContainerType"], document.Components.Schemas.Keys); + }); + } + + // Tests for: https://github.com/dotnet/aspnetcore/issues/60012 + [Fact] + public async Task SupportsListOfClassInSelfReferentialSchema() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/", (Category item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("Category", requestSchema.Reference.Id); + + // Assert that $ref is used for nested Tags + var categorySchema = requestSchema.GetEffective(document); + Assert.Equal("Tag", categorySchema.Properties["tags"].Items.Reference.Id); + + // Assert that $ref is used for nested Parent + Assert.Equal("Category", categorySchema.Properties["parent"].Reference.Id); + + // Assert that no duplicate schemas are emitted + Assert.Collection(document.Components.Schemas, + schema => + { + Assert.Equal("Category", schema.Key); + }, + schema => + { + Assert.Equal("Tag", schema.Key); + }); + }); + } + + [Fact] + public async Task UsesSameReferenceForSameTypeInDifferentLocations() + { + // Arrange + var builder = CreateBuilder(); + + builder.MapPost("/parent-object", (ParentObject item) => { }); + builder.MapPost("/list", (List item) => { }); + builder.MapPost("/dictionary", (Dictionary item) => { }); + + await VerifyOpenApiDocument(builder, document => + { + var operation = document.Paths["/parent-object"].Operations[OperationType.Post]; + var requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for top-level + Assert.Equal("ParentObject", requestSchema.Reference.Id); + + // Assert that $ref is used for nested Children + var parentSchema = requestSchema.GetEffective(document); + Assert.Equal("ChildObject", parentSchema.Properties["children"].Items.Reference.Id); + + // Assert that $ref is used for 
nested Parent + var childSchema = parentSchema.Properties["children"].Items.GetEffective(document); + Assert.Equal("ParentObject", childSchema.Properties["parent"].Reference.Id); + + operation = document.Paths["/list"].Operations[OperationType.Post]; + requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for items in the list definition + Assert.Equal("ParentObject", requestSchema.Items.Reference.Id); + parentSchema = requestSchema.Items.GetEffective(document); + Assert.Equal("ChildObject", parentSchema.Properties["children"].Items.Reference.Id); + + childSchema = parentSchema.Properties["children"].Items.GetEffective(document); + Assert.Equal("ParentObject", childSchema.Properties["parent"].Reference.Id); + + operation = document.Paths["/dictionary"].Operations[OperationType.Post]; + requestSchema = operation.RequestBody.Content["application/json"].Schema; + + // Assert $ref used for items in the dictionary definition + Assert.Equal("ParentObject", requestSchema.AdditionalProperties.Reference.Id); + parentSchema = requestSchema.AdditionalProperties.GetEffective(document); + Assert.Equal("ChildObject", parentSchema.Properties["children"].Items.Reference.Id); + + childSchema = parentSchema.Properties["children"].Items.GetEffective(document); + Assert.Equal("ParentObject", childSchema.Properties["parent"].Reference.Id); + + // Assert that only the expected schemas are registered + Assert.Equal(["ChildObject", "ParentObject"], document.Components.Schemas.Keys); + }); + } + + private class Category + { + public required string Name { get; set; } + + public Category Parent { get; set; } + + public IEnumerable Tags { get; set; } = []; + } + + public class Tag + { + public required string Name { get; set; } + } + + private class ContainerType + { + public List> Seq1 { get; set; } = []; + public List> Seq2 { get; set; } = []; + } + + private class Root + { + public Item Item1 { get; set; } = null!; + public Item Item2 { get; set; } = null!; + } + + private class Item + { + public string[] Name { get; set; } = null!; + public int value { get; set; } + } + + private class LocationContainer + { + public LocationDto Location { get; set; } + } + + private class LocationDto + { + public AddressDto Address { get; set; } + } + + private class AddressDto + { + public LocationDto RelatedLocation { get; set; } + } + +#nullable enable + private class ParentObject + { + public int Id { get; set; } + public List Children { get; set; } = []; + } + + private class ChildObject + { + public int Id { get; set; } + public required ParentObject Parent { get; set; } + } } +#nullable restore diff --git a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/SchemaTransformerTests.cs b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/SchemaTransformerTests.cs index 5d20c810d6fa..565c3a2b48b1 100644 --- a/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/SchemaTransformerTests.cs +++ b/src/OpenApi/test/Microsoft.AspNetCore.OpenApi.Tests/Transformers/SchemaTransformerTests.cs @@ -444,6 +444,7 @@ public async Task SchemaTransformer_CanModifyItemTypesInADocument() builder.MapGet("/list", () => new List { 1, 2, 3, 4 }); builder.MapGet("/single", () => 1); + builder.MapGet("/dictionary", () => new Dictionary {{ "key", 1 }}); var options = new OpenApiOptions(); options.AddSchemaTransformer((schema, context, cancellationToken) => @@ -469,7 +470,13 @@ await VerifyOpenApiDocument(builder, options, document => getOperation = 
path.Operations[OperationType.Get]; responseSchema = getOperation.Responses["200"].Content["application/json"].Schema.GetEffective(document); Assert.Equal("modified-number-format", responseSchema.Format); - }); + + // Assert that the schema represent dictionary values has been modified + path = document.Paths["/dictionary"]; + getOperation = path.Operations[OperationType.Get]; + responseSchema = getOperation.Responses["200"].Content["application/json"].Schema.GetEffective(document); + Assert.Equal("modified-number-format", responseSchema.AdditionalProperties.Format); + }); } [Fact] diff --git a/src/Security/Authentication/Certificate/src/CertificateAuthenticationHandler.cs b/src/Security/Authentication/Certificate/src/CertificateAuthenticationHandler.cs index 8f8873057027..de74d6e5bf7c 100644 --- a/src/Security/Authentication/Certificate/src/CertificateAuthenticationHandler.cs +++ b/src/Security/Authentication/Certificate/src/CertificateAuthenticationHandler.cs @@ -135,21 +135,35 @@ private async Task ValidateCertificateAsync(X509Certificate2 } var chainPolicy = BuildChainPolicy(clientCertificate, isCertificateSelfSigned); - using var chain = new X509Chain + var chain = new X509Chain { ChainPolicy = chainPolicy }; - var certificateIsValid = chain.Build(clientCertificate); - if (!certificateIsValid) + try + { + var certificateIsValid = chain.Build(clientCertificate); + if (!certificateIsValid) + { + var chainErrors = new List(chain.ChainStatus.Length); + foreach (var validationFailure in chain.ChainStatus) + { + chainErrors.Add($"{validationFailure.Status} {validationFailure.StatusInformation}"); + } + Logger.CertificateFailedValidation(clientCertificate.Subject, chainErrors); + return AuthenticateResults.InvalidClientCertificate; + } + } + finally { - var chainErrors = new List(chain.ChainStatus.Length); - foreach (var validationFailure in chain.ChainStatus) + // Disposing the chain does not dispose the elements we potentially built. + // Do the full walk manually to dispose. + for (var i = 0; i < chain.ChainElements.Count; i++) { - chainErrors.Add($"{validationFailure.Status} {validationFailure.StatusInformation}"); + chain.ChainElements[i].Certificate.Dispose(); } - Logger.CertificateFailedValidation(clientCertificate.Subject, chainErrors); - return AuthenticateResults.InvalidClientCertificate; + + chain.Dispose(); } var certificateValidatedContext = new CertificateValidatedContext(Context, Scheme, Options) diff --git a/src/Security/Authentication/JwtBearer/src/JwtBearerConfigureOptions.cs b/src/Security/Authentication/JwtBearer/src/JwtBearerConfigureOptions.cs index 6ce4d6014bbb..1ec03cca626c 100644 --- a/src/Security/Authentication/JwtBearer/src/JwtBearerConfigureOptions.cs +++ b/src/Security/Authentication/JwtBearer/src/JwtBearerConfigureOptions.cs @@ -72,7 +72,7 @@ public void Configure(string? 
name, JwtBearerOptions options) ValidAudiences = audiences, ValidAudience = audience, ValidateIssuerSigningKey = true, - IssuerSigningKeys = GetIssuerSigningKeys(configSection, issuers), + IssuerSigningKeys = GetIssuerSigningKeys(configSection, [issuer, ..issuers]), }; } diff --git a/src/Security/Authentication/test/JwtBearerTests_Handler.cs b/src/Security/Authentication/test/JwtBearerTests_Handler.cs index 1c24afe93cc0..dc5eb760a270 100644 --- a/src/Security/Authentication/test/JwtBearerTests_Handler.cs +++ b/src/Security/Authentication/test/JwtBearerTests_Handler.cs @@ -957,6 +957,7 @@ public async Task ExpirationAndIssuedWhenMinOrMaxValue() public void CanReadJwtBearerOptionsFromConfig() { var services = new ServiceCollection(); + var key = "qPG6tDtfxFYZifHW3sEueQ=="; var config = new ConfigurationBuilder().AddInMemoryCollection([ new("Authentication:Schemes:Bearer:ValidIssuer", "dotnet-user-jwts"), new("Authentication:Schemes:Bearer:ValidIssuers:0", "dotnet-user-jwts-2"), @@ -965,6 +966,9 @@ public void CanReadJwtBearerOptionsFromConfig() new("Authentication:Schemes:Bearer:BackchannelTimeout", "00:01:00"), new("Authentication:Schemes:Bearer:RequireHttpsMetadata", "false"), new("Authentication:Schemes:Bearer:SaveToken", "True"), + new("Authentication:Schemes:Bearer:SigningKeys:0:Issuer", "dotnet-user-jwts"), + new("Authentication:Schemes:Bearer:SigningKeys:0:Value", key), + new("Authentication:Schemes:Bearer:SigningKeys:0:Length", "32"), ]).Build(); services.AddSingleton(config); @@ -987,6 +991,10 @@ public void CanReadJwtBearerOptionsFromConfig() Assert.True(jwtBearerOptions.MapInboundClaims); Assert.True(jwtBearerOptions.TokenValidationParameters.ValidateIssuer); Assert.True(jwtBearerOptions.TokenValidationParameters.ValidateAudience); + + var securityKey = Assert.Single(jwtBearerOptions.TokenValidationParameters.IssuerSigningKeys); + var symmetricKey = Assert.IsType(securityKey); + Assert.Equal(key, Convert.ToBase64String(symmetricKey.Key)); } [Fact] diff --git a/src/Servers/HttpSys/test/FunctionalTests/DelegateTests.cs b/src/Servers/HttpSys/test/FunctionalTests/DelegateTests.cs index 79b77e32a93e..8cb6332a8f6d 100644 --- a/src/Servers/HttpSys/test/FunctionalTests/DelegateTests.cs +++ b/src/Servers/HttpSys/test/FunctionalTests/DelegateTests.cs @@ -217,6 +217,7 @@ public async Task UpdateDelegationRuleTest() [ConditionalFact] [DelegateSupportedCondition(true)] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/60141")] public async Task DelegateAfterReceiverRestart() { var queueName = Guid.NewGuid().ToString(); diff --git a/src/Servers/HttpSys/test/FunctionalTests/ResponseBodyTests.cs b/src/Servers/HttpSys/test/FunctionalTests/ResponseBodyTests.cs index 90634d72cfbf..7c263bac6371 100644 --- a/src/Servers/HttpSys/test/FunctionalTests/ResponseBodyTests.cs +++ b/src/Servers/HttpSys/test/FunctionalTests/ResponseBodyTests.cs @@ -206,6 +206,7 @@ public async Task ResponseBody_WriteChunked_ManuallyChunked() } [ConditionalFact] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/63532")] public async Task ResponseBody_WriteContentLength_PassedThrough() { string address; diff --git a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.cpp b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.cpp index c49200ce8d2f..e7892419d123 100644 --- a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.cpp +++ b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.cpp @@ -133,6 +133,23 @@ std::wstring 
Environment::GetDllDirectoryValue() return expandedStr; } +ProcessorArchitecture Environment::GetCurrentProcessArchitecture() +{ + // Use compile-time detection - we know which architectures we support + // and this is the most reliable and efficient approach. IsWow64Process2 + // doesn't show the correct architecture when running under x64 emulation + // on ARM64. +#if defined(_M_ARM64) + return ProcessorArchitecture::ARM64; +#elif defined(_M_AMD64) + return ProcessorArchitecture::AMD64; +#elif defined(_M_IX86) + return ProcessorArchitecture::x86; +#else + static_assert(false, "Unknown target architecture"); +#endif +} + bool Environment::IsRunning64BitProcess() { // Check the bitness of the currently running process diff --git a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.h b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.h index 9e3e1b1bf772..a9e6e85d9ecc 100644 --- a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.h +++ b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/Environment.h @@ -5,6 +5,7 @@ #include #include +#include "ProcessorArchitecture.h" class Environment { @@ -23,6 +24,8 @@ class Environment static bool IsRunning64BitProcess(); static + ProcessorArchitecture GetCurrentProcessArchitecture(); + static HRESULT CopyToDirectory(const std::wstring& source, const std::filesystem::path& destination, bool cleanDest, const std::filesystem::path& directoryToIgnore, int& copiedFileCount); static bool CheckUpToDate(const std::wstring& source, const std::filesystem::path& destination, const std::wstring& extension, const std::filesystem::path& directoryToIgnore); diff --git a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.cpp b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.cpp index 9b12cd0132b4..8552852ce0e5 100644 --- a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.cpp +++ b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.cpp @@ -197,7 +197,7 @@ HostFxrResolver::TryGetHostFxrPath( size_t size = MAX_PATH * 2; hostfxrPath.resize(size); - get_hostfxr_parameters params; + get_hostfxr_parameters params{}; params.size = sizeof(get_hostfxr_parameters); params.assembly_path = applicationPath.c_str(); params.dotnet_root = dotnetRoot.c_str(); @@ -393,7 +393,7 @@ HostFxrResolver::GetAbsolutePathToDotnetFromHostfxr(const fs::path& hostfxrPath) // Tries to call where.exe to find the location of dotnet.exe. // Will check that the bitness of dotnet matches the current // worker process bitness. -// Returns true if a valid dotnet was found, else false.R +// Returns true if a valid dotnet was found, else false. 
// std::optional HostFxrResolver::InvokeWhereToFindDotnet() @@ -409,14 +409,12 @@ HostFxrResolver::InvokeWhereToFindDotnet() HandleWrapper hStdOutWritePipe; HandleWrapper hProcess; HandleWrapper hThread; - CComBSTR pwzDotnetName = NULL; - DWORD dwFilePointer; - BOOL fIsCurrentProcess64Bit; - DWORD dwExitCode; + CComBSTR pwzDotnetName = nullptr; + DWORD dwFilePointer = 0; + DWORD dwExitCode = 0; STRU struDotnetSubstring; STRU struDotnetLocationsString; - DWORD dwNumBytesRead; - DWORD dwBinaryType; + DWORD dwNumBytesRead = 0; INT index = 0; INT prevIndex = 0; std::optional result; @@ -427,6 +425,7 @@ HostFxrResolver::InvokeWhereToFindDotnet() securityAttributes.bInheritHandle = TRUE; LOG_INFO(L"Invoking where.exe to find dotnet.exe"); + auto currentProcessArch = Environment::GetCurrentProcessArchitecture(); // Create a read/write pipe that will be used for reading the result of where.exe FINISHED_LAST_ERROR_IF(!CreatePipe(&hStdOutReadPipe, &hStdOutWritePipe, &securityAttributes, 0)); @@ -500,13 +499,9 @@ HostFxrResolver::InvokeWhereToFindDotnet() } FINISHED_IF_FAILED(struDotnetLocationsString.CopyA(pzFileContents, dwNumBytesRead)); - LOG_INFOF(L"where.exe invocation returned: '%ls'", struDotnetLocationsString.QueryStr()); - fIsCurrentProcess64Bit = Environment::IsRunning64BitProcess(); - - LOG_INFOF(L"Current process bitness type detected as isX64=%d", fIsCurrentProcess64Bit); - + // Look for a dotnet.exe that matches the current process architecture while (TRUE) { index = struDotnetLocationsString.IndexOf(L"\r\n", prevIndex); @@ -519,26 +514,83 @@ HostFxrResolver::InvokeWhereToFindDotnet() // \r\n is two wchars, so add 2 here. prevIndex = index + 2; - LOG_INFOF(L"Processing entry '%ls'", struDotnetSubstring.QueryStr()); - - if (LOG_LAST_ERROR_IF(!GetBinaryTypeW(struDotnetSubstring.QueryStr(), &dwBinaryType))) + ProcessorArchitecture dotnetArch = GetFileProcessorArchitecture(struDotnetSubstring.QueryStr()); + if (dotnetArch == currentProcessArch) { - continue; - } - - LOG_INFOF(L"Binary type %d", dwBinaryType); + LOG_INFOF(L"Found dotnet.exe matching current process architecture (%ls) '%ls'", + ProcessorArchitectureToString(dotnetArch), + struDotnetSubstring.QueryStr()); - if (fIsCurrentProcess64Bit == (dwBinaryType == SCS_64BIT_BINARY)) - { - // The bitness of dotnet matched with the current worker process bitness. return std::make_optional(struDotnetSubstring.QueryStr()); } + else + { + LOG_INFOF(L"Skipping dotnet.exe with non-matching architecture %ls (need %ls). '%ls'", + ProcessorArchitectureToString(dotnetArch), + ProcessorArchitectureToString(currentProcessArch), + struDotnetSubstring.QueryStr()); + } } Finished: return result; } +// Reads the PE header of the binary to determine its architecture. 
+ProcessorArchitecture HostFxrResolver::GetFileProcessorArchitecture(const WCHAR* binaryPath) +{ + // Errors while reading from the file shouldn't throw unless + // file.exception(bits) is set + std::ifstream file(binaryPath, std::ios::binary); + if (!file.is_open()) + { + LOG_TRACEF(L"Failed to open file %ls", binaryPath); + return ProcessorArchitecture::Unknown; + } + + // Read the DOS header + IMAGE_DOS_HEADER dosHeader{}; + file.read(reinterpret_cast(&dosHeader), sizeof(dosHeader)); + if (dosHeader.e_magic != IMAGE_DOS_SIGNATURE) // 'MZ' + { + LOG_TRACEF(L"%ls is not a valid executable file (missing MZ header).", binaryPath); + return ProcessorArchitecture::Unknown; + } + + // Seek to the PE header + file.seekg(dosHeader.e_lfanew, std::ios::beg); + + // Read the PE header + DWORD peSignature{}; + file.read(reinterpret_cast(&peSignature), sizeof(peSignature)); + if (peSignature != IMAGE_NT_SIGNATURE) // 'PE\0\0' + { + LOG_TRACEF(L"%ls is not a valid PE file (missing PE header).", binaryPath); + return ProcessorArchitecture::Unknown; + } + + // Read the file header + IMAGE_FILE_HEADER fileHeader{}; + file.read(reinterpret_cast(&fileHeader), sizeof(fileHeader)); + + // Determine the architecture based on the machine type + switch (fileHeader.Machine) + { + case IMAGE_FILE_MACHINE_I386: + LOG_INFOF(L"%ls is x86 (32-bit)", binaryPath); + return ProcessorArchitecture::x86; + case IMAGE_FILE_MACHINE_AMD64: + LOG_INFOF(L"%ls is AMD64 (x64)", binaryPath); + return ProcessorArchitecture::AMD64; + case IMAGE_FILE_MACHINE_ARM64: + LOG_INFOF(L"%ls is ARM64", binaryPath); + return ProcessorArchitecture::ARM64; + default: + LOG_INFOF(L"%ls has unknown architecture (machine type: 0x%X)", binaryPath, fileHeader.Machine); + return ProcessorArchitecture::Unknown; + } +} + std::optional HostFxrResolver::GetAbsolutePathToDotnetFromProgramFiles() { diff --git a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.h b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.h index 519f6df52c97..9065e2aecd2b 100644 --- a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.h +++ b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/HostFxrResolver.h @@ -8,8 +8,8 @@ #include #include #include - #include "ErrorContext.h" +#include "ProcessorArchitecture.h" #define READ_BUFFER_SIZE 4096 @@ -74,6 +74,8 @@ class HostFxrResolver const std::filesystem::path & requestedPath ); + static ProcessorArchitecture GetFileProcessorArchitecture(const WCHAR* binaryPath); + struct LocalFreeDeleter { void operator ()(_In_ LPWSTR* ptr) const diff --git a/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/ProcessorArchitecture.h b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/ProcessorArchitecture.h new file mode 100644 index 000000000000..195feddcae7b --- /dev/null +++ b/src/Servers/IIS/AspNetCoreModuleV2/CommonLib/ProcessorArchitecture.h @@ -0,0 +1,28 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +#pragma once + +enum class ProcessorArchitecture +{ + Unknown, + x86, + AMD64, + ARM64 +}; + +inline const wchar_t* ProcessorArchitectureToString(ProcessorArchitecture arch) +{ + switch (arch) + { + case ProcessorArchitecture::x86: + return L"x86"; + case ProcessorArchitecture::AMD64: + return L"AMD64"; + case ProcessorArchitecture::ARM64: + return L"ARM64"; + case ProcessorArchitecture::Unknown: + default: + return L"Unknown"; + } +} \ No newline at end of file diff --git a/src/Servers/IIS/AspNetCoreModuleV2/CommonLibTests/main.cpp b/src/Servers/IIS/AspNetCoreModuleV2/CommonLibTests/main.cpp index 86c764df8533..3e1bbc1add95 100644 --- a/src/Servers/IIS/AspNetCoreModuleV2/CommonLibTests/main.cpp +++ b/src/Servers/IIS/AspNetCoreModuleV2/CommonLibTests/main.cpp @@ -8,5 +8,5 @@ DECLARE_DEBUG_PRINT_OBJECT2("tests", ASPNETCORE_DEBUG_FLAG_INFO | ASPNETCORE_DEB int wmain(int argc, wchar_t* argv[]) { ::testing::InitGoogleTest(&argc, argv); - RUN_ALL_TESTS(); + return RUN_ALL_TESTS(); } diff --git a/src/Servers/IIS/build/Build.Lib.Settings b/src/Servers/IIS/build/Build.Lib.Settings index 0dcba8c2011a..9327eb363771 100644 --- a/src/Servers/IIS/build/Build.Lib.Settings +++ b/src/Servers/IIS/build/Build.Lib.Settings @@ -9,7 +9,7 @@ - false + true _LIB;%(PreprocessorDefinitions) true diff --git a/src/Servers/Kestrel/Core/src/CoreStrings.resx b/src/Servers/Kestrel/Core/src/CoreStrings.resx index 7f6c785963f6..c6fb576b6011 100644 --- a/src/Servers/Kestrel/Core/src/CoreStrings.resx +++ b/src/Servers/Kestrel/Core/src/CoreStrings.resx @@ -737,4 +737,10 @@ For more information on configuring HTTPS see https://go.microsoft.com/fwlink/?l Call UseKestrelHttpsConfiguration() on IWebHostBuilder to automatically enable HTTPS when an https:// address is used. - + + The client sent a {frameType} frame to a control stream that was too large. + + + Bad chunk extension. + + \ No newline at end of file diff --git a/src/Servers/Kestrel/Core/src/Http3Limits.cs b/src/Servers/Kestrel/Core/src/Http3Limits.cs index 0d7801e48bf8..b6556557a340 100644 --- a/src/Servers/Kestrel/Core/src/Http3Limits.cs +++ b/src/Servers/Kestrel/Core/src/Http3Limits.cs @@ -37,7 +37,7 @@ internal int HeaderTableSize /// /// Indicates the size of the maximum allowed size of a request header field sequence. This limit applies to both name and value sequences in their compressed and uncompressed representations. /// - /// Value must be greater than 0, defaults to 2^14 (16,384). + /// Value must be greater than 0, defaults to 2^15 (32,768). /// /// public int MaxRequestHeaderFieldSize diff --git a/src/Servers/Kestrel/Core/src/Internal/Http/Http1ChunkedEncodingMessageBody.cs b/src/Servers/Kestrel/Core/src/Internal/Http/Http1ChunkedEncodingMessageBody.cs index 5e426ed25721..6f2b39a205b7 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http/Http1ChunkedEncodingMessageBody.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http/Http1ChunkedEncodingMessageBody.cs @@ -16,6 +16,7 @@ internal sealed class Http1ChunkedEncodingMessageBody : Http1MessageBody { // byte consts don't have a data type annotation so we pre-cast it private const byte ByteCR = (byte)'\r'; + private const byte ByteLF = (byte)'\n'; // "7FFFFFFF\r\n" is the largest chunk size that could be returned as an int. 
private const int MaxChunkPrefixBytes = 10; @@ -27,6 +28,8 @@ internal sealed class Http1ChunkedEncodingMessageBody : Http1MessageBody private readonly Pipe _requestBodyPipe; private ReadResult _readResult; + private static readonly bool InsecureChunkedParsing = AppContext.TryGetSwitch("Microsoft.AspNetCore.Server.Kestrel.EnableInsecureChunkedRequestParsing", out var value) && value; + public Http1ChunkedEncodingMessageBody(Http1Connection context, bool keepAlive) : base(context, keepAlive) { @@ -345,15 +348,31 @@ private void ParseChunkedPrefix(in ReadOnlySequence buffer, out SequencePo KestrelBadHttpRequestException.Throw(RequestRejectionReason.BadChunkSizeData); } + // https://www.rfc-editor.org/rfc/rfc9112#section-7.1 + // chunk = chunk-size [ chunk-ext ] CRLF + // chunk-data CRLF + + // https://www.rfc-editor.org/rfc/rfc9112#section-7.1.1 + // chunk-ext = *( BWS ";" BWS chunk-ext-name + // [BWS "=" BWS chunk-ext-val] ) + // chunk-ext-name = token + // chunk-ext-val = token / quoted-string private void ParseExtension(ReadOnlySequence buffer, out SequencePosition consumed, out SequencePosition examined) { - // Chunk-extensions not currently parsed - // Just drain the data - examined = buffer.Start; + // Chunk-extensions parsed for \r\n and throws for unpaired \r or \n. do { - SequencePosition? extensionCursorPosition = buffer.PositionOf(ByteCR); + SequencePosition? extensionCursorPosition; + if (InsecureChunkedParsing) + { + extensionCursorPosition = buffer.PositionOf(ByteCR); + } + else + { + extensionCursorPosition = buffer.PositionOfAny(ByteCR, ByteLF); + } + if (extensionCursorPosition == null) { // End marker not found yet @@ -361,9 +380,10 @@ private void ParseExtension(ReadOnlySequence buffer, out SequencePosition examined = buffer.End; AddAndCheckObservedBytes(buffer.Length); return; - }; + } var extensionCursor = extensionCursorPosition.Value; + var charsToByteCRExclusive = buffer.Slice(0, extensionCursor).Length; var suffixBuffer = buffer.Slice(extensionCursor); @@ -378,7 +398,9 @@ private void ParseExtension(ReadOnlySequence buffer, out SequencePosition suffixBuffer = suffixBuffer.Slice(0, 2); var suffixSpan = suffixBuffer.ToSpan(); - if (suffixSpan[1] == '\n') + if (InsecureChunkedParsing + ? (suffixSpan[1] == ByteLF) + : (suffixSpan[0] == ByteCR && suffixSpan[1] == ByteLF)) { // We consumed the \r\n at the end of the extension, so switch modes. _mode = _inputLength > 0 ? Mode.Data : Mode.Trailer; @@ -387,13 +409,22 @@ private void ParseExtension(ReadOnlySequence buffer, out SequencePosition examined = suffixBuffer.End; AddAndCheckObservedBytes(charsToByteCRExclusive + 2); } - else + else if (InsecureChunkedParsing) { + examined = buffer.Start; // Don't consume suffixSpan[1] in case it is also a \r. buffer = buffer.Slice(charsToByteCRExclusive + 1); consumed = extensionCursor; AddAndCheckObservedBytes(charsToByteCRExclusive + 1); } + else + { + consumed = suffixBuffer.End; + examined = suffixBuffer.End; + + // We have \rX or \nX, that's an invalid extension. 
+ KestrelBadHttpRequestException.Throw(RequestRejectionReason.BadChunkExtension); + } } while (_mode == Mode.Extension); } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http/Http1Connection.cs b/src/Servers/Kestrel/Core/src/Internal/Http/Http1Connection.cs index b8714d601f9c..178007ec06ac 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http/Http1Connection.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http/Http1Connection.cs @@ -3,7 +3,6 @@ using System.Buffers; using System.Diagnostics; -using System.Globalization; using System.IO.Pipelines; using Microsoft.AspNetCore.Connections; using Microsoft.AspNetCore.Connections.Features; @@ -644,16 +643,24 @@ private void ValidateNonOriginHostHeader(string hostText) // authority component, excluding any userinfo subcomponent and its "@" // delimiter. + // Accessing authority always allocates, store it in a local to only allocate once + var authority = _absoluteRequestTarget!.Authority; + // System.Uri doesn't not tell us if the port was in the original string or not. // When IsDefaultPort = true, we will allow Host: with or without the default port - if (hostText != _absoluteRequestTarget!.Authority) + if (hostText != authority) { if (!_absoluteRequestTarget.IsDefaultPort - || hostText != _absoluteRequestTarget.Authority + ":" + _absoluteRequestTarget.Port.ToString(CultureInfo.InvariantCulture)) + || hostText != $"{authority}:{_absoluteRequestTarget.Port}") { if (_context.ServiceContext.ServerOptions.AllowHostHeaderOverride) { - hostText = _absoluteRequestTarget.Authority + ":" + _absoluteRequestTarget.Port.ToString(CultureInfo.InvariantCulture); + // No need to include the port here, it's either already in the Authority + // or it's the default port + // see: https://datatracker.ietf.org/doc/html/rfc2616/#section-14.23 + // A "host" without any trailing port information implies the default + // port for the service requested (e.g., "80" for an HTTP URL). 
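The ParseExtension change above tightens chunk-extension parsing: with strict parsing (the default) a bare CR or LF inside an extension is now rejected as BadChunkExtension, and the previous behavior is only reachable through the AppContext switch introduced at the top of that hunk. A hedged sketch of opting back in, plus a reference payload; only the switch name and rejection reason come from the change itself, the rest is illustrative:

// Compatibility escape hatch; the flag is captured in a static readonly field,
// so set it at process start, before Kestrel parses its first chunked body.
AppContext.SetSwitch(
    "Microsoft.AspNetCore.Server.Kestrel.EnableInsecureChunkedRequestParsing",
    true);

// A minimal chunked request body with a chunk extension (RFC 9112 §7.1.1):
// "4" is the chunk size, ";trace=abc" the extension, "Wiki" the chunk data.
const string chunkedBodyWithExtension =
    "4;trace=abc\r\n" +
    "Wiki\r\n" +
    "0\r\n" +
    "\r\n";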
+ hostText = authority; HttpRequestHeaders.HeaderHost = hostText; } else diff --git a/src/Servers/Kestrel/Core/src/Internal/Http/RequestRejectionReason.cs b/src/Servers/Kestrel/Core/src/Internal/Http/RequestRejectionReason.cs index 827192823023..91467c6cb046 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http/RequestRejectionReason.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http/RequestRejectionReason.cs @@ -16,6 +16,7 @@ internal enum RequestRejectionReason UnexpectedEndOfRequestContent, BadChunkSuffix, BadChunkSizeData, + BadChunkExtension, ChunkedRequestIncomplete, InvalidRequestTarget, InvalidCharactersInHeaderName, @@ -31,5 +32,5 @@ internal enum RequestRejectionReason ConnectMethodRequired, MissingHostHeader, MultipleHostHeaders, - InvalidHostHeader + InvalidHostHeader, } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Data.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Data.cs index 95dbbcb8e4d5..ce1e9b0db815 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Data.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Data.cs @@ -7,7 +7,7 @@ internal partial class Http3RawFrame { public void PrepareData() { - Length = 0; + RemainingLength = 0; Type = Http3FrameType.Data; } } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.GoAway.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.GoAway.cs index fe2eb3a6e42e..de1a73cb830e 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.GoAway.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.GoAway.cs @@ -7,7 +7,7 @@ internal partial class Http3RawFrame { public void PrepareGoAway() { - Length = 0; + RemainingLength = 0; Type = Http3FrameType.GoAway; } } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Headers.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Headers.cs index bcf65929694d..11e8c971ff21 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Headers.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Headers.cs @@ -7,7 +7,7 @@ internal partial class Http3RawFrame { public void PrepareHeaders() { - Length = 0; + RemainingLength = 0; Type = Http3FrameType.Headers; } } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Settings.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Settings.cs index 9e74e07db5b8..03ed2a670250 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Settings.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.Settings.cs @@ -7,7 +7,7 @@ internal partial class Http3RawFrame { public void PrepareSettings() { - Length = 0; + RemainingLength = 0; Type = Http3FrameType.Settings; } } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.cs index 076b9640d0bb..5839d515524c 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Frames/Http3RawFrame.cs @@ -9,7 +9,7 @@ namespace System.Net.Http; internal partial class Http3RawFrame #pragma warning restore CA1852 // Seal internal types { - public long Length { get; set; } + public long RemainingLength { get; set; } public Http3FrameType Type { get; internal set; } @@ -17,6 +17,6 @@ internal partial class Http3RawFrame 
public override string ToString() { - return $"{FormattedType} Length: {Length}"; + return $"{FormattedType} Length: {RemainingLength}"; } } diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3ControlStream.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3ControlStream.cs index dbd99d838a0e..c179676663ff 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3ControlStream.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3ControlStream.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. using System.Buffers; +using System.Diagnostics; using System.Globalization; using System.IO.Pipelines; using System.Net.Http; @@ -19,13 +20,18 @@ internal abstract class Http3ControlStream : IHttp3Stream, IThreadPoolWorkItem private const int EncoderStreamTypeId = 2; private const int DecoderStreamTypeId = 3; + // Arbitrarily chosen max frame length + // ControlStream frames currently are very small, either a single variable length integer (max 8 bytes), two variable length integers, + // or in the case of SETTINGS a small collection of two variable length integers + // We'll use a generous value of 10k in case new optional frame(s) are added that might be a little larger than the current frames. + private const int MaxFrameSize = 10_000; + private readonly Http3FrameWriter _frameWriter; private readonly Http3StreamContext _context; private readonly Http3PeerSettings _serverPeerSettings; private readonly IStreamIdFeature _streamIdFeature; private readonly IStreamClosedFeature _streamClosedFeature; private readonly IProtocolErrorCodeFeature _errorCodeFeature; - private readonly Http3RawFrame _incomingFrame = new Http3RawFrame(); private volatile int _isClosed; private long _headerType; private readonly object _completionLock = new(); @@ -159,9 +165,9 @@ private async ValueTask TryReadStreamHeaderAsync() { if (!readableBuffer.IsEmpty) { - var id = VariableLengthIntegerHelper.GetInteger(readableBuffer, out consumed, out examined); - if (id != -1) + if (VariableLengthIntegerHelper.TryGetInteger(readableBuffer, out consumed, out var id)) { + examined = consumed; return id; } } @@ -240,6 +246,8 @@ public async Task ProcessRequestAsync(IHttpApplication appli } finally { + await _context.StreamContext.DisposeAsync(); + ApplyCompletionFlag(StreamCompletionFlags.Completed); _context.StreamLifetimeHandler.OnStreamCompleted(this); } @@ -247,6 +255,8 @@ public async Task ProcessRequestAsync(IHttpApplication appli private async Task HandleControlStream() { + var incomingFrame = new Http3RawFrame(); + var isContinuedFrame = false; while (_isClosed == 0) { var result = await Input.ReadAsync(); @@ -259,12 +269,33 @@ private async Task HandleControlStream() if (!readableBuffer.IsEmpty) { // need to kick off httpprotocol process request async here. 
- while (Http3FrameReader.TryReadFrame(ref readableBuffer, _incomingFrame, out var framePayload)) + while (Http3FrameReader.TryReadFrame(ref readableBuffer, incomingFrame, isContinuedFrame, out var framePayload)) { - Log.Http3FrameReceived(_context.ConnectionId, _streamIdFeature.StreamId, _incomingFrame); - - consumed = examined = framePayload.End; - await ProcessHttp3ControlStream(framePayload); + Debug.Assert(incomingFrame.RemainingLength >= framePayload.Length); + + // Only log when parsing the beginning of the frame + if (!isContinuedFrame) + { + Log.Http3FrameReceived(_context.ConnectionId, _streamIdFeature.StreamId, incomingFrame); + } + + examined = framePayload.End; + await ProcessHttp3ControlStream(incomingFrame, isContinuedFrame, framePayload, out consumed); + + if (incomingFrame.RemainingLength == framePayload.Length) + { + Debug.Assert(framePayload.Slice(0, consumed).Length == framePayload.Length); + + incomingFrame.RemainingLength = 0; + isContinuedFrame = false; + } + else + { + incomingFrame.RemainingLength -= framePayload.Slice(0, consumed).Length; + isContinuedFrame = true; + + Debug.Assert(incomingFrame.RemainingLength > 0); + } } } @@ -294,56 +325,71 @@ private async ValueTask HandleEncodingDecodingTask() } } - private ValueTask ProcessHttp3ControlStream(in ReadOnlySequence payload) + private ValueTask ProcessHttp3ControlStream(Http3RawFrame incomingFrame, bool isContinuedFrame, in ReadOnlySequence payload, out SequencePosition consumed) { - switch (_incomingFrame.Type) + // default to consuming the entire payload, this is so that we don't need to set consumed from all the frame types that aren't implemented yet. + // individual frame types can set consumed if they're implemented and want to be able to partially consume the payload. 
+ consumed = payload.End; + switch (incomingFrame.Type) { case Http3FrameType.Data: case Http3FrameType.Headers: case Http3FrameType.PushPromise: - // https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-7.2 - throw new Http3ConnectionErrorException(CoreStrings.FormatHttp3ErrorUnsupportedFrameOnControlStream(_incomingFrame.FormattedType), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame); + // https://www.rfc-editor.org/rfc/rfc9114.html#section-8.1-2.12.1 + throw new Http3ConnectionErrorException(CoreStrings.FormatHttp3ErrorUnsupportedFrameOnControlStream(incomingFrame.FormattedType), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame); case Http3FrameType.Settings: - return ProcessSettingsFrameAsync(payload); + CheckMaxFrameSize(incomingFrame); + return ProcessSettingsFrameAsync(isContinuedFrame, payload, out consumed); case Http3FrameType.GoAway: - return ProcessGoAwayFrameAsync(); + return ProcessGoAwayFrameAsync(isContinuedFrame, incomingFrame, payload, out consumed); case Http3FrameType.CancelPush: - return ProcessCancelPushFrameAsync(); + return ProcessCancelPushFrameAsync(incomingFrame, payload, out consumed); case Http3FrameType.MaxPushId: - return ProcessMaxPushIdFrameAsync(); + return ProcessMaxPushIdFrameAsync(incomingFrame, payload, out consumed); default: - return ProcessUnknownFrameAsync(_incomingFrame.Type); + CheckMaxFrameSize(incomingFrame); + return ProcessUnknownFrameAsync(incomingFrame.Type); } - } - private ValueTask ProcessSettingsFrameAsync(ReadOnlySequence payload) - { - if (_haveReceivedSettingsFrame) + static void CheckMaxFrameSize(Http3RawFrame http3RawFrame) { - // https://quicwg.org/base-drafts/draft-ietf-quic-http.html#name-settings - throw new Http3ConnectionErrorException(CoreStrings.Http3ErrorControlStreamMultipleSettingsFrames, Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame); + // Not part of the RFC, but it's a good idea to limit the size of frames when we know they're supposed to be small. 
+ if (http3RawFrame.RemainingLength >= MaxFrameSize) + { + throw new Http3ConnectionErrorException(CoreStrings.FormatHttp3ControlStreamFrameTooLarge(http3RawFrame.FormattedType), Http3ErrorCode.FrameError, ConnectionEndReason.InvalidFrameLength); + } } + } - _haveReceivedSettingsFrame = true; - _streamClosedFeature.OnClosed(static state => + private ValueTask ProcessSettingsFrameAsync(bool isContinuedFrame, ReadOnlySequence payload, out SequencePosition consumed) + { + if (!isContinuedFrame) { - var stream = (Http3ControlStream)state!; - stream.OnStreamClosed(); - }, this); + if (_haveReceivedSettingsFrame) + { + // https://www.rfc-editor.org/rfc/rfc9114.html#section-7.2.4 + throw new Http3ConnectionErrorException(CoreStrings.Http3ErrorControlStreamMultipleSettingsFrames, Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame); + } + + _haveReceivedSettingsFrame = true; + _streamClosedFeature.OnClosed(static state => + { + var stream = (Http3ControlStream)state!; + stream.OnStreamClosed(); + }, this); + } while (true) { - var id = VariableLengthIntegerHelper.GetInteger(payload, out var consumed, out _); - if (id == -1) + if (!VariableLengthIntegerHelper.TryGetInteger(payload, out consumed, out var id)) { break; } - payload = payload.Slice(consumed); - - var value = VariableLengthIntegerHelper.GetInteger(payload, out consumed, out _); - if (value == -1) + if (!VariableLengthIntegerHelper.TryGetInteger(payload.Slice(consumed), out consumed, out var value)) { + // Reset consumed to very start even though we successfully read 1 varint. It's because we want to keep the id for when we have the value as well. + consumed = payload.Start; break; } @@ -382,37 +428,48 @@ private void ProcessSetting(long id, long value) } } - private ValueTask ProcessGoAwayFrameAsync() + private ValueTask ProcessGoAwayFrameAsync(bool isContinuedFrame, Http3RawFrame incomingFrame, ReadOnlySequence payload, out SequencePosition consumed) { - EnsureSettingsFrame(Http3FrameType.GoAway); + // https://www.rfc-editor.org/rfc/rfc9114.html#name-goaway + + // We've already triggered RequestClose since isContinuedFrame is only true + // after we've already parsed the frame type and called the processing function at least once. + if (!isContinuedFrame) + { + EnsureSettingsFrame(Http3FrameType.GoAway); - // StopProcessingNextRequest must be called before RequestClose to ensure it's considered client initiated. - _context.Connection.StopProcessingNextRequest(serverInitiated: false, ConnectionEndReason.ClientGoAway); - _context.ConnectionContext.Features.Get()?.RequestClose(); + // StopProcessingNextRequest must be called before RequestClose to ensure it's considered client initiated. + _context.Connection.StopProcessingNextRequest(serverInitiated: false, ConnectionEndReason.ClientGoAway); + _context.ConnectionContext.Features.Get()?.RequestClose(); + } - // https://quicwg.org/base-drafts/draft-ietf-quic-http.html#name-goaway - // PUSH is not implemented so nothing to do. + // PUSH is not implemented but we still want to parse the frame to do error checking + ParseVarIntWithFrameLengthValidation(incomingFrame, payload, out consumed); // TODO: Double check the connection remains open. 
return default; } - private ValueTask ProcessCancelPushFrameAsync() + private ValueTask ProcessCancelPushFrameAsync(Http3RawFrame incomingFrame, ReadOnlySequence payload, out SequencePosition consumed) { + // https://www.rfc-editor.org/rfc/rfc9114.html#section-7.2.3 + EnsureSettingsFrame(Http3FrameType.CancelPush); - // https://quicwg.org/base-drafts/draft-ietf-quic-http.html#name-cancel_push - // PUSH is not implemented so nothing to do. + // PUSH is not implemented but we still want to parse the frame to do error checking + ParseVarIntWithFrameLengthValidation(incomingFrame, payload, out consumed); return default; } - private ValueTask ProcessMaxPushIdFrameAsync() + private ValueTask ProcessMaxPushIdFrameAsync(Http3RawFrame incomingFrame, ReadOnlySequence payload, out SequencePosition consumed) { + // https://www.rfc-editor.org/rfc/rfc9114.html#section-7.2.7 + EnsureSettingsFrame(Http3FrameType.MaxPushId); - // https://quicwg.org/base-drafts/draft-ietf-quic-http.html#name-cancel_push - // PUSH is not implemented so nothing to do. + // PUSH is not implemented but we still want to parse the frame to do error checking + ParseVarIntWithFrameLengthValidation(incomingFrame, payload, out consumed); return default; } @@ -426,6 +483,23 @@ private ValueTask ProcessUnknownFrameAsync(Http3FrameType frameType) return default; } + // Used for frame types that aren't (fully) implemented yet and contain a single var int as part of their framing. (CancelPush, MaxPushId, GoAway) + // We want to throw an error if the length field of the frame is larger than the spec defined format of the frame. + private static void ParseVarIntWithFrameLengthValidation(Http3RawFrame incomingFrame, ReadOnlySequence payload, out SequencePosition consumed) + { + if (!VariableLengthIntegerHelper.TryGetInteger(payload, out consumed, out _)) + { + return; + } + + if (incomingFrame.RemainingLength > payload.Slice(0, consumed).Length) + { + // https://www.rfc-editor.org/rfc/rfc9114.html#section-10.8 + // An implementation MUST ensure that the length of a frame exactly matches the length of the fields it contains. + throw new Http3ConnectionErrorException(CoreStrings.FormatHttp3ControlStreamFrameTooLarge(Http3Formatting.ToFormattedType(incomingFrame.Type)), Http3ErrorCode.FrameError, ConnectionEndReason.InvalidFrameLength); + } + } + private void EnsureSettingsFrame(Http3FrameType frameType) { if (!_haveReceivedSettingsFrame) diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameReader.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameReader.cs index 66740c710f10..2de0472483a1 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameReader.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameReader.cs @@ -19,36 +19,44 @@ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 | Frame Payload (*) ... 
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ */ - internal static bool TryReadFrame(ref ReadOnlySequence readableBuffer, Http3RawFrame frame, out ReadOnlySequence framePayload) + // Reads and returns partial frames, don't rely on the frame being complete when using this method + // Set isContinuedFrame to true when expecting to read more of the previous frame + internal static bool TryReadFrame(ref ReadOnlySequence readableBuffer, Http3RawFrame frame, bool isContinuedFrame, out ReadOnlySequence framePayload) { framePayload = ReadOnlySequence.Empty; - SequencePosition consumed; + SequencePosition consumed = readableBuffer.Start; + var length = frame.RemainingLength; - var type = VariableLengthIntegerHelper.GetInteger(readableBuffer, out consumed, out _); - if (type == -1) + if (!isContinuedFrame) { - return false; - } + if (!VariableLengthIntegerHelper.TryGetInteger(readableBuffer, out consumed, out var type)) + { + return false; + } - var firstLengthBuffer = readableBuffer.Slice(consumed); + var firstLengthBuffer = readableBuffer.Slice(consumed); - var length = VariableLengthIntegerHelper.GetInteger(firstLengthBuffer, out consumed, out _); + if (!VariableLengthIntegerHelper.TryGetInteger(firstLengthBuffer, out consumed, out length)) + { + return false; + } - // Make sure the whole frame is buffered - if (length == -1) - { - return false; + frame.RemainingLength = length; + frame.Type = (Http3FrameType)type; } var startOfFramePayload = readableBuffer.Slice(consumed); - if (startOfFramePayload.Length < length) + + // Get all the available bytes or the rest of the frame whichever is less + length = Math.Min(startOfFramePayload.Length, length); + + // If we were expecting a non-empty payload, but haven't received any of it yet, + // there is nothing to process until we wait for more data. 
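To make the partial-frame handling in TryReadFrame easier to follow, here is a minimal, self-contained sketch of the "continued frame" bookkeeping; the type names (SketchFrame, SketchFrameReader) are illustrative only and the header is simplified to a single length byte, whereas the real reader parses two variable-length integers (type and length). This is a sketch of the idea, not Kestrel's implementation.

using System;
using System.Buffers;

sealed class SketchFrame
{
    // Payload bytes still expected for the frame currently being read.
    public long RemainingLength;
}

static class SketchFrameReader
{
    public static bool TryReadSketchFrame(
        ref ReadOnlySequence<byte> buffer, SketchFrame frame, bool isContinuedFrame,
        out ReadOnlySequence<byte> payload)
    {
        payload = ReadOnlySequence<byte>.Empty;

        if (!isContinuedFrame)
        {
            // Parse the header exactly once per frame (simplified here to one length byte).
            var reader = new SequenceReader<byte>(buffer);
            if (!reader.TryRead(out byte declaredLength))
            {
                return false;
            }
            frame.RemainingLength = declaredLength;
            buffer = buffer.Slice(1);
        }

        // Hand back whatever has arrived, up to the rest of the frame.
        var take = Math.Min(buffer.Length, frame.RemainingLength);
        if (take == 0 && frame.RemainingLength != 0)
        {
            return false; // nothing buffered yet for this frame; wait for more data
        }

        payload = buffer.Slice(0, take);
        buffer = buffer.Slice(payload.End);
        return true;
    }
}

// Caller pattern, mirroring what Http3Stream.ProcessRequestAsync does later in this change:
//   while (SketchFrameReader.TryReadSketchFrame(ref readable, frame, isContinuedFrame, out var payload))
//   {
//       Process(payload);
//       frame.RemainingLength -= payload.Length;
//       isContinuedFrame = frame.RemainingLength > 0;
//   }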
+ if (length == 0 && frame.RemainingLength != 0) { return false; } - frame.Length = length; - frame.Type = (Http3FrameType)type; - // The remaining payload minus the extra fields framePayload = startOfFramePayload.Slice(0, length); readableBuffer = readableBuffer.Slice(framePayload.End); diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameWriter.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameWriter.cs index 44ade9362ea1..6b94153f80d1 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameWriter.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3FrameWriter.cs @@ -121,7 +121,7 @@ internal Task WriteSettingsAsync(List settings) WriteSettings(settings, buffer); // Advance pipe writer and flush - _outgoingFrame.Length = totalLength; + _outgoingFrame.RemainingLength = totalLength; _outputWriter.Advance(totalLength); return _outputWriter.FlushAsync().GetAsTask(); @@ -186,7 +186,7 @@ private void WriteDataUnsynchronized(in ReadOnlySequence data, long dataLe return; } - _outgoingFrame.Length = (int)dataLength; + _outgoingFrame.RemainingLength = (int)dataLength; WriteHeaderUnsynchronized(); @@ -209,7 +209,7 @@ void SplitAndWriteDataUnsynchronized(in ReadOnlySequence data, long dataLe do { var currentData = remainingData.Slice(0, dataPayloadLength); - _outgoingFrame.Length = dataPayloadLength; + _outgoingFrame.RemainingLength = dataPayloadLength; WriteHeaderUnsynchronized(); @@ -223,7 +223,7 @@ void SplitAndWriteDataUnsynchronized(in ReadOnlySequence data, long dataLe } while (dataLength > dataPayloadLength); - _outgoingFrame.Length = (int)dataLength; + _outgoingFrame.RemainingLength = (int)dataLength; WriteHeaderUnsynchronized(); @@ -240,7 +240,7 @@ internal ValueTask WriteGoAway(long id) var length = VariableLengthIntegerHelper.GetByteCount(id); - _outgoingFrame.Length = length; + _outgoingFrame.RemainingLength = length; WriteHeaderUnsynchronized(); @@ -253,10 +253,10 @@ internal ValueTask WriteGoAway(long id) private void WriteHeaderUnsynchronized() { _log.Http3FrameSending(_connectionId, _streamIdFeature.StreamId, _outgoingFrame); - var headerLength = WriteHeader(_outgoingFrame.Type, _outgoingFrame.Length, _outputWriter); + var headerLength = WriteHeader(_outgoingFrame.Type, _outgoingFrame.RemainingLength, _outputWriter); // We assume the payload will be written prior to the next flush. 
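The frame-writer changes below only rename Length to RemainingLength, but the header being written is two QUIC variable-length integers (frame type, then payload length) per RFC 9114/RFC 9000. A hedged, self-contained sketch of that encoding follows; it is illustrative and not Kestrel's actual WriteHeader.

using System;
using System.Buffers.Binary;

static class VarIntSketch
{
    // Encodes a QUIC variable-length integer (RFC 9000 section 16) and returns
    // the number of bytes written: 1, 2, 4, or 8, chosen by the value's magnitude.
    public static int WriteVarInt(Span<byte> destination, long value)
    {
        if (value < 0x40)            // 6-bit value, 2-bit prefix 00
        {
            destination[0] = (byte)value;
            return 1;
        }
        if (value < 0x4000)          // 14-bit value, prefix 01
        {
            BinaryPrimitives.WriteUInt16BigEndian(destination, (ushort)(value | 0x4000));
            return 2;
        }
        if (value < 0x4000_0000)     // 30-bit value, prefix 10
        {
            BinaryPrimitives.WriteUInt32BigEndian(destination, (uint)value | 0x8000_0000);
            return 4;
        }
        // 62-bit value, prefix 11
        BinaryPrimitives.WriteUInt64BigEndian(destination, (ulong)value | 0xC000_0000_0000_0000);
        return 8;
    }

    // An HTTP/3 frame header is simply the frame type followed by the payload length.
    public static int WriteFrameHeader(Span<byte> destination, long frameType, long payloadLength)
    {
        var written = WriteVarInt(destination, frameType);
        written += WriteVarInt(destination.Slice(written), payloadLength);
        return written;
    }
}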
- _unflushedBytes += headerLength + _outgoingFrame.Length; + _unflushedBytes += headerLength + _outgoingFrame.RemainingLength; } public ValueTask Write100ContinueAsync() @@ -269,7 +269,7 @@ public ValueTask Write100ContinueAsync() } _outgoingFrame.PrepareHeaders(); - _outgoingFrame.Length = ContinueBytes.Length; + _outgoingFrame.RemainingLength = ContinueBytes.Length; WriteHeaderUnsynchronized(); _outputWriter.Write(ContinueBytes); return TimeFlushUnsynchronizedAsync(); @@ -394,7 +394,7 @@ private void FinishWritingHeaders(int payloadLength, bool done) ValidateHeadersTotalSize(); - _outgoingFrame.Length = _headerEncodingBuffer.WrittenCount; + _outgoingFrame.RemainingLength = _headerEncodingBuffer.WrittenCount; WriteHeaderUnsynchronized(); _outputWriter.Write(_headerEncodingBuffer.WrittenSpan); diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3PendingStream.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3PendingStream.cs index b6db0bb810db..7dabb9654c56 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3PendingStream.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3PendingStream.cs @@ -60,8 +60,7 @@ public async ValueTask ReadNextStreamHeaderAsync(Http3StreamContext contex if (!readableBuffer.IsEmpty) { - var value = VariableLengthIntegerHelper.GetInteger(readableBuffer, out consumed, out _); - if (value != -1) + if (VariableLengthIntegerHelper.TryGetInteger(readableBuffer, out consumed, out var value)) { if (!advanceOn.HasValue || value == (long)advanceOn) { diff --git a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3Stream.cs b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3Stream.cs index 795fdc42b521..832ba5b7540a 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Http3/Http3Stream.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Http3/Http3Stream.cs @@ -59,7 +59,6 @@ internal abstract partial class Http3Stream : HttpProtocol, IHttp3Stream, IHttpS private readonly object _completionLock = new(); protected RequestHeaderParsingState _requestHeaderParsingState; - protected readonly Http3RawFrame _incomingFrame = new(); public bool EndStreamReceived => (_completionState & StreamCompletionFlags.EndStreamReceived) == StreamCompletionFlags.EndStreamReceived; public bool IsAborted => (_completionState & StreamCompletionFlags.Aborted) == StreamCompletionFlags.Aborted; @@ -609,6 +608,8 @@ public async Task ProcessRequestAsync(IHttpApplication appli try { + var incomingFrame = new Http3RawFrame(); + var isContinuedFrame = false; while (_isClosed == 0) { var result = await Input.ReadAsync(); @@ -620,12 +621,19 @@ public async Task ProcessRequestAsync(IHttpApplication appli { if (!readableBuffer.IsEmpty) { - while (Http3FrameReader.TryReadFrame(ref readableBuffer, _incomingFrame, out var framePayload)) + while (Http3FrameReader.TryReadFrame(ref readableBuffer, incomingFrame, isContinuedFrame, out var framePayload)) { - Log.Http3FrameReceived(ConnectionId, _streamIdFeature.StreamId, _incomingFrame); + // Only log when parsing the beginning of the frame + if (!isContinuedFrame) + { + Log.Http3FrameReceived(ConnectionId, _streamIdFeature.StreamId, incomingFrame); + } consumed = examined = framePayload.End; - await ProcessHttp3Stream(application, framePayload, result.IsCompleted && readableBuffer.IsEmpty); + await ProcessHttp3Stream(application, incomingFrame, isContinuedFrame, framePayload, result.IsCompleted && readableBuffer.IsEmpty); + + incomingFrame.RemainingLength -= framePayload.Length; + isContinuedFrame = incomingFrame.RemainingLength > 0 ? 
true : false; } } @@ -748,22 +756,23 @@ private ValueTask OnEndStreamReceived() return RequestBodyPipe.Writer.CompleteAsync(); } - private Task ProcessHttp3Stream(IHttpApplication application, in ReadOnlySequence payload, bool isCompleted) where TContext : notnull + private Task ProcessHttp3Stream(IHttpApplication application, Http3RawFrame incomingFrame, bool isContinuedFrame, + in ReadOnlySequence payload, bool isCompleted) where TContext : notnull { - return _incomingFrame.Type switch + return incomingFrame.Type switch { Http3FrameType.Data => ProcessDataFrameAsync(payload), - Http3FrameType.Headers => ProcessHeadersFrameAsync(application, payload, isCompleted), + Http3FrameType.Headers => ProcessHeadersFrameAsync(application, incomingFrame, isContinuedFrame, payload, isCompleted), // https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-7.2.4 // These frames need to be on a control stream Http3FrameType.Settings or Http3FrameType.CancelPush or Http3FrameType.GoAway or Http3FrameType.MaxPushId => throw new Http3ConnectionErrorException( - CoreStrings.FormatHttp3ErrorUnsupportedFrameOnRequestStream(_incomingFrame.FormattedType), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame), + CoreStrings.FormatHttp3ErrorUnsupportedFrameOnRequestStream(incomingFrame.FormattedType), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame), // The server should never receive push promise Http3FrameType.PushPromise => throw new Http3ConnectionErrorException( - CoreStrings.FormatHttp3ErrorUnsupportedFrameOnServer(_incomingFrame.FormattedType), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame), + CoreStrings.FormatHttp3ErrorUnsupportedFrameOnServer(incomingFrame.FormattedType), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame), _ => ProcessUnknownFrameAsync(), }; } @@ -775,11 +784,13 @@ private static Task ProcessUnknownFrameAsync() return Task.CompletedTask; } - private async Task ProcessHeadersFrameAsync(IHttpApplication application, ReadOnlySequence payload, bool isCompleted) where TContext : notnull + private async Task ProcessHeadersFrameAsync(IHttpApplication application, Http3RawFrame incomingFrame, bool isContinuedFrame, + ReadOnlySequence payload, bool isCompleted) where TContext : notnull { // HEADERS frame after trailing headers is invalid. 
// https://quicwg.org/base-drafts/draft-ietf-quic-http.html#section-4.1 - if (_requestHeaderParsingState == RequestHeaderParsingState.Trailers) + // Since we parse data as we get it, we can receive partial frames which means we need to check that we're in the middle of handling the trailers header frame + if (_requestHeaderParsingState == RequestHeaderParsingState.Trailers && !isContinuedFrame) { throw new Http3ConnectionErrorException(CoreStrings.FormatHttp3StreamErrorFrameReceivedAfterTrailers(Http3Formatting.ToFormattedType(Http3FrameType.Headers)), Http3ErrorCode.UnexpectedFrame, ConnectionEndReason.UnexpectedFrame); } @@ -791,8 +802,17 @@ private async Task ProcessHeadersFrameAsync(IHttpApplication try { - QPackDecoder.Decode(payload, endHeaders: true, handler: this); - QPackDecoder.Reset(); + var endHeaders = payload.Length == incomingFrame.RemainingLength; + QPackDecoder.Decode(payload, endHeaders, handler: this); + if (endHeaders) + { + QPackDecoder.Reset(); + } + else + { + // Headers frame isn't complete, return to read more of the frame + return; + } } catch (QPackDecodingException ex) { diff --git a/src/Servers/Kestrel/Core/src/Internal/Infrastructure/KestrelTrace.Http3.cs b/src/Servers/Kestrel/Core/src/Internal/Infrastructure/KestrelTrace.Http3.cs index 4159c927e531..54e32f258f00 100644 --- a/src/Servers/Kestrel/Core/src/Internal/Infrastructure/KestrelTrace.Http3.cs +++ b/src/Servers/Kestrel/Core/src/Internal/Infrastructure/KestrelTrace.Http3.cs @@ -37,7 +37,7 @@ public void Http3FrameReceived(string connectionId, long streamId, Http3RawFrame { if (_http3Logger.IsEnabled(LogLevel.Trace)) { - Http3Log.Http3FrameReceived(_http3Logger, connectionId, Http3Formatting.ToFormattedType(frame.Type), streamId, frame.Length); + Http3Log.Http3FrameReceived(_http3Logger, connectionId, Http3Formatting.ToFormattedType(frame.Type), streamId, frame.RemainingLength); } } @@ -45,7 +45,7 @@ public void Http3FrameSending(string connectionId, long streamId, Http3RawFrame { if (_http3Logger.IsEnabled(LogLevel.Trace)) { - Http3Log.Http3FrameSending(_http3Logger, connectionId, Http3Formatting.ToFormattedType(frame.Type), streamId, frame.Length); + Http3Log.Http3FrameSending(_http3Logger, connectionId, Http3Formatting.ToFormattedType(frame.Type), streamId, frame.RemainingLength); } } diff --git a/src/Servers/Kestrel/Core/src/Internal/KestrelServerImpl.cs b/src/Servers/Kestrel/Core/src/Internal/KestrelServerImpl.cs index cefdd3d65282..6bcede93dbea 100644 --- a/src/Servers/Kestrel/Core/src/Internal/KestrelServerImpl.cs +++ b/src/Servers/Kestrel/Core/src/Internal/KestrelServerImpl.cs @@ -39,8 +39,9 @@ public KestrelServerImpl( IEnumerable multiplexedFactories, IHttpsConfigurationService httpsConfigurationService, ILoggerFactory loggerFactory, + DiagnosticSource? 
diagnosticSource, KestrelMetrics metrics) - : this(transportFactories, multiplexedFactories, httpsConfigurationService, CreateServiceContext(options, loggerFactory, diagnosticSource: null, metrics)) + : this(transportFactories, multiplexedFactories, httpsConfigurationService, CreateServiceContext(options, loggerFactory, diagnosticSource, metrics)) { } @@ -111,7 +112,8 @@ private static ServiceContext CreateServiceContext(IOptions ServiceContext.ServerOptions; - private ServiceContext ServiceContext { get; } + // Internal for testing + internal ServiceContext ServiceContext { get; } private KestrelTrace Trace => ServiceContext.Log; diff --git a/src/Servers/Kestrel/Core/src/KestrelBadHttpRequestException.cs b/src/Servers/Kestrel/Core/src/KestrelBadHttpRequestException.cs index 05ae34f89802..6bfa5bfe60c4 100644 --- a/src/Servers/Kestrel/Core/src/KestrelBadHttpRequestException.cs +++ b/src/Servers/Kestrel/Core/src/KestrelBadHttpRequestException.cs @@ -49,6 +49,9 @@ internal static BadHttpRequestException GetException(RequestRejectionReason reas case RequestRejectionReason.BadChunkSizeData: ex = new BadHttpRequestException(CoreStrings.BadRequest_BadChunkSizeData, StatusCodes.Status400BadRequest, reason); break; + case RequestRejectionReason.BadChunkExtension: + ex = new BadHttpRequestException(CoreStrings.BadRequest_BadChunkExtension, StatusCodes.Status400BadRequest, reason); + break; case RequestRejectionReason.ChunkedRequestIncomplete: ex = new BadHttpRequestException(CoreStrings.BadRequest_ChunkedRequestIncomplete, StatusCodes.Status400BadRequest, reason); break; diff --git a/src/Servers/Kestrel/Core/src/KestrelServer.cs b/src/Servers/Kestrel/Core/src/KestrelServer.cs index 75cec0130767..7f2909c77cf6 100644 --- a/src/Servers/Kestrel/Core/src/KestrelServer.cs +++ b/src/Servers/Kestrel/Core/src/KestrelServer.cs @@ -36,6 +36,7 @@ public KestrelServer(IOptions options, IConnectionListener Array.Empty(), new SimpleHttpsConfigurationService(), loggerFactory, + diagnosticSource: null, new KestrelMetrics(new DummyMeterFactory())); } diff --git a/src/Servers/Kestrel/Core/test/KestrelServerTests.cs b/src/Servers/Kestrel/Core/test/KestrelServerTests.cs index a0709d00ad19..e688812a6075 100644 --- a/src/Servers/Kestrel/Core/test/KestrelServerTests.cs +++ b/src/Servers/Kestrel/Core/test/KestrelServerTests.cs @@ -309,6 +309,7 @@ private static KestrelServerImpl CreateKestrelServer( multiplexedFactories, httpsConfigurationService, loggerFactory ?? new LoggerFactory(new[] { new KestrelTestLoggerProvider() }), + diagnosticSource: null, metrics ?? 
new KestrelMetrics(new TestMeterFactory())); } diff --git a/src/Servers/Kestrel/Core/test/MessageBodyTests.cs b/src/Servers/Kestrel/Core/test/MessageBodyTests.cs index 8cd5db8c8cbe..d9ee3c979a11 100644 --- a/src/Servers/Kestrel/Core/test/MessageBodyTests.cs +++ b/src/Servers/Kestrel/Core/test/MessageBodyTests.cs @@ -338,14 +338,14 @@ public async Task ReadExitsGivenIncompleteChunkedExtension() var stream = new HttpRequestStream(Mock.Of(), reader); reader.StartAcceptingReads(body); - input.Add("5;\r\0"); + input.Add("5;\r"); var buffer = new byte[1024]; var readTask = stream.ReadAsync(buffer, 0, buffer.Length); Assert.False(readTask.IsCompleted); - input.Add("\r\r\r\nHello\r\n0\r\n\r\n"); + input.Add("\nHello\r\n0\r\n\r\n"); Assert.Equal(5, await readTask.DefaultTimeout()); try diff --git a/src/Servers/Kestrel/Core/test/VariableIntHelperTests.cs b/src/Servers/Kestrel/Core/test/VariableIntHelperTests.cs index 8b73bd0e2c48..f8fa53170829 100644 --- a/src/Servers/Kestrel/Core/test/VariableIntHelperTests.cs +++ b/src/Servers/Kestrel/Core/test/VariableIntHelperTests.cs @@ -14,7 +14,8 @@ public class VariableIntHelperTests [MemberData(nameof(IntegerData))] public void CheckDecoding(long expected, byte[] input) { - var decoded = VariableLengthIntegerHelper.GetInteger(new ReadOnlySequence(input), out _, out _); + var result = VariableLengthIntegerHelper.TryGetInteger(new ReadOnlySequence(input), out _, out var decoded); + Assert.True(result); Assert.Equal(expected, decoded); } diff --git a/src/Servers/Kestrel/Kestrel/test/WebHostBuilderKestrelExtensionsTests.cs b/src/Servers/Kestrel/Kestrel/test/WebHostBuilderKestrelExtensionsTests.cs index 759d074a6d82..b24da893ab53 100644 --- a/src/Servers/Kestrel/Kestrel/test/WebHostBuilderKestrelExtensionsTests.cs +++ b/src/Servers/Kestrel/Kestrel/test/WebHostBuilderKestrelExtensionsTests.cs @@ -2,10 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. using System.Collections; +using System.IO.Pipelines; using Microsoft.AspNetCore.Connections; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting.Server; using Microsoft.AspNetCore.Server.Kestrel.Core; +using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Infrastructure; using Microsoft.AspNetCore.Server.Kestrel.Transport.NamedPipes.Internal; using Microsoft.AspNetCore.Server.Kestrel.Transport.Sockets; using Microsoft.Extensions.DependencyInjection; @@ -107,6 +109,11 @@ public void ServerIsKestrelServerImpl() .UseKestrel() .Configure(app => { }); - Assert.IsType(hostBuilder.Build().Services.GetService()); + var server = Assert.IsType(hostBuilder.Build().Services.GetService()); + + Assert.NotNull(server.ServiceContext.DiagnosticSource); + Assert.IsType(server.ServiceContext.Metrics); + Assert.Equal(PipeScheduler.ThreadPool, server.ServiceContext.Scheduler); + Assert.Equal(TimeProvider.System, server.ServiceContext.TimeProvider); } } diff --git a/src/Servers/Kestrel/shared/test/Http3/Http3InMemory.cs b/src/Servers/Kestrel/shared/test/Http3/Http3InMemory.cs index 64ebcdb07b41..9cae05272fab 100644 --- a/src/Servers/Kestrel/shared/test/Http3/Http3InMemory.cs +++ b/src/Servers/Kestrel/shared/test/Http3/Http3InMemory.cs @@ -395,15 +395,15 @@ private static long GetOutputResponseBufferSize(ServiceContext serviceContext) return bufferSize ?? 
0; } - internal ValueTask CreateControlStream() + internal ValueTask CreateControlStream(PipeScheduler clientWriterScheduler = null) { - return CreateControlStream(id: 0); + return CreateControlStream(id: 0, clientWriterScheduler); } - internal async ValueTask CreateControlStream(int? id) + internal async ValueTask CreateControlStream(int? id, PipeScheduler clientWriterScheduler = null) { var testStreamContext = new TestStreamContext(canRead: true, canWrite: false, this); - testStreamContext.Initialize(streamId: 2); + testStreamContext.Initialize(streamId: 2, clientWriterScheduler); var stream = new Http3ControlStream(this, testStreamContext); _runningStreams[stream.StreamId] = stream; @@ -416,16 +416,17 @@ internal async ValueTask CreateControlStream(int? id) return stream; } - internal async ValueTask CreateRequestStream(IEnumerable> headers, Http3RequestHeaderHandler headerHandler = null, bool endStream = false, TaskCompletionSource tsc = null) + internal async ValueTask CreateRequestStream(IEnumerable> headers, + Http3RequestHeaderHandler headerHandler = null, bool endStream = false, TaskCompletionSource tsc = null, PipeScheduler clientWriterScheduler = null) { - var stream = CreateRequestStreamCore(headerHandler); + var stream = CreateRequestStreamCore(headerHandler, clientWriterScheduler); if (tsc is not null) { stream.StartStreamDisposeTcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); } - if (headers is not null) + if (headers is not null && headers.Any()) { await stream.SendHeadersAsync(headers, endStream); } @@ -437,9 +438,10 @@ internal async ValueTask CreateRequestStream(IEnumerable CreateRequestStream(Http3HeadersEnumerator headers, Http3RequestHeaderHandler headerHandler = null, bool endStream = false, TaskCompletionSource tsc = null) + internal async ValueTask CreateRequestStream(Http3HeadersEnumerator headers, Http3RequestHeaderHandler headerHandler = null, + bool endStream = false, TaskCompletionSource tsc = null, PipeScheduler clientWriterScheduler = null) { - var stream = CreateRequestStreamCore(headerHandler); + var stream = CreateRequestStreamCore(headerHandler, clientWriterScheduler); if (tsc is not null) { @@ -455,7 +457,7 @@ internal async ValueTask CreateRequestStream(Http3HeadersEnu return stream; } - private Http3RequestStream CreateRequestStreamCore(Http3RequestHeaderHandler headerHandler) + private Http3RequestStream CreateRequestStreamCore(Http3RequestHeaderHandler headerHandler, PipeScheduler clientWriterScheduler) { var requestStreamId = GetStreamId(0x00); if (!_streamContextPool.TryDequeue(out var testStreamContext)) @@ -466,7 +468,7 @@ private Http3RequestStream CreateRequestStreamCore(Http3RequestHeaderHandler hea { Logger.LogDebug($"Reusing context for request stream {requestStreamId}."); } - testStreamContext.Initialize(requestStreamId); + testStreamContext.Initialize(requestStreamId, clientWriterScheduler); return new Http3RequestStream(this, Connection, testStreamContext, headerHandler ?? 
new Http3RequestHeaderHandler()); } @@ -566,7 +568,7 @@ internal async ValueTask ReceiveFrameAsync(bool expectEnd throw new InvalidOperationException("No data received."); } - if (Http3FrameReader.TryReadFrame(ref buffer, frame, out var framePayload)) + if (Http3FrameReader.TryReadFrame(ref buffer, frame, isContinuedFrame: false, out var framePayload)) { consumed = examined = framePayload.End; frame.Payload = framePayload.ToArray(); @@ -844,16 +846,14 @@ internal async ValueTask> ExpectSettingsAsync() var settings = new Dictionary(); while (true) { - var id = VariableLengthIntegerHelper.GetInteger(payload, out var consumed, out _); - if (id == -1) + if (!VariableLengthIntegerHelper.TryGetInteger(payload, out var consumed, out var id)) { break; } payload = payload.Slice(consumed); - var value = VariableLengthIntegerHelper.GetInteger(payload, out consumed, out _); - if (value == -1) + if (!VariableLengthIntegerHelper.TryGetInteger(payload, out consumed, out var value)) { break; } @@ -934,9 +934,9 @@ public async ValueTask TryReadStreamIdAsync() { if (!readableBuffer.IsEmpty) { - var id = VariableLengthIntegerHelper.GetInteger(readableBuffer, out consumed, out examined); - if (id != -1) + if (VariableLengthIntegerHelper.TryGetInteger(readableBuffer, out consumed, out var id)) { + examined = consumed; return id; } } @@ -1013,6 +1013,7 @@ public TestMultiplexedConnectionContext(Http3InMemory testBase) Features.Set(this); Features.Set(this); ConnectionClosedRequested = ConnectionClosingCts.Token; + ConnectionClosed = ConnectionClosedCts.Token; MetricsContext = TestContextFactory.CreateMetricsContext(this); } @@ -1027,6 +1028,8 @@ public TestMultiplexedConnectionContext(Http3InMemory testBase) public CancellationTokenSource ConnectionClosingCts { get; set; } = new CancellationTokenSource(); + public CancellationTokenSource ConnectionClosedCts { get; set; } = new CancellationTokenSource(); + public long Error { get => _error ?? -1; @@ -1046,6 +1049,7 @@ public override void Abort(ConnectionAbortedException abortReason) { ToServerAcceptQueue.Writer.TryComplete(); ToClientAcceptQueue.Writer.TryComplete(); + ConnectionClosedCts.Cancel(); } public override async ValueTask AcceptAsync(CancellationToken cancellationToken = default) @@ -1119,38 +1123,30 @@ public TestStreamContext(bool canRead, bool canWrite, Http3InMemory testBase) _testBase = testBase; } - public void Initialize(long streamId) + public void Initialize(long streamId, PipeScheduler clientWriterScheduler = null) { - if (!_isComplete) - { - // Create new pipes when test stream context is reused rather than reseting them. - // This is required because the client tests read from these directly from these pipes. - // When a request is finished they'll check to see whether there is anymore content - // in the Application.Output pipe. If it has been reset then that code will error. 
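The comment above explains why the test harness builds fresh pipes each time a stream context is reused rather than resetting them. As a rough, self-contained illustration of the cross-wired transport/application pipe pair the harness relies on (all names here are made up; the real helpers are GetInputPipeOptions, GetOutputPipeOptions, and DuplexPipePair):

using System.IO.Pipelines;

// Minimal stand-in for the harness duplex pipe; purely illustrative.
sealed class SketchDuplexPipe : IDuplexPipe
{
    public SketchDuplexPipe(PipeReader input, PipeWriter output)
    {
        Input = input;
        Output = output;
    }

    public PipeReader Input { get; }
    public PipeWriter Output { get; }
}

static class SketchPipePair
{
    // Two pipes, cross-wired: what the server reads is what the test client wrote, and vice versa.
    public static (IDuplexPipe Transport, IDuplexPipe Application) Create()
    {
        var input = new Pipe();   // client -> server
        var output = new Pipe();  // server -> client

        var transport = new SketchDuplexPipe(input.Reader, output.Writer);    // server side
        var application = new SketchDuplexPipe(output.Reader, input.Writer);  // test/client side
        return (transport, application);
    }
}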
- var inputOptions = Http3InMemory.GetInputPipeOptions(_testBase._serviceContext, _testBase._memoryPool, PipeScheduler.ThreadPool); - var outputOptions = Http3InMemory.GetOutputPipeOptions(_testBase._serviceContext, _testBase._memoryPool, PipeScheduler.ThreadPool); - - _inputPipe = new Pipe(inputOptions); - _outputPipe = new Pipe(outputOptions); - - _transportPipeReader = new CompletionPipeReader(_inputPipe.Reader); - _transportPipeWriter = new CompletionPipeWriter(_outputPipe.Writer); - - _pair = new DuplexPipePair( - new DuplexPipe(_transportPipeReader, _transportPipeWriter), - new DuplexPipe(_outputPipe.Reader, _inputPipe.Writer)); - } - else + if (_isComplete) { _pair.Application.Input.Complete(); _pair.Application.Output.Complete(); + } - _transportPipeReader.Reset(); - _transportPipeWriter.Reset(); + // Create new pipes when test stream context is reused rather than reseting them. + // This is required because the client tests read from these directly from these pipes. + // When a request is finished they'll check to see whether there is anymore content + // in the Application.Output pipe. If it has been reset then that code will error. + var inputOptions = Http3InMemory.GetInputPipeOptions(_testBase._serviceContext, _testBase._memoryPool, clientWriterScheduler ?? PipeScheduler.ThreadPool); + var outputOptions = Http3InMemory.GetOutputPipeOptions(_testBase._serviceContext, _testBase._memoryPool, PipeScheduler.ThreadPool); - _inputPipe.Reset(); - _outputPipe.Reset(); - } + _inputPipe = new Pipe(inputOptions); + _outputPipe = new Pipe(outputOptions); + + _transportPipeReader = new CompletionPipeReader(_inputPipe.Reader); + _transportPipeWriter = new CompletionPipeWriter(_outputPipe.Writer); + + _pair = new DuplexPipePair( + new DuplexPipe(_transportPipeReader, _transportPipeWriter), + new DuplexPipe(_outputPipe.Reader, _inputPipe.Writer)); Features.Set(this); Features.Set(this); diff --git a/src/Servers/Kestrel/shared/test/HttpParsingData.cs b/src/Servers/Kestrel/shared/test/HttpParsingData.cs index 6b240e18b5c8..a301e27e3877 100644 --- a/src/Servers/Kestrel/shared/test/HttpParsingData.cs +++ b/src/Servers/Kestrel/shared/test/HttpParsingData.cs @@ -497,8 +497,10 @@ public static TheoryData HostHeaderData { "GET /pub/WWW/", "www.example.org" }, { "GET http://localhost/", "localhost" }, { "GET http://localhost:80/", "localhost:80" }, + { "GET http://localhost:80/", "localhost" }, { "GET https://localhost/", "localhost" }, { "GET https://localhost:443/", "localhost:443" }, + { "GET https://localhost:443/", "localhost" }, { "CONNECT asp.net:80", "asp.net:80" }, { "CONNECT asp.net:443", "asp.net:443" }, { "CONNECT user-images.githubusercontent.com:443", "user-images.githubusercontent.com:443" }, @@ -534,10 +536,13 @@ public static TheoryData HostHeaderInvalidData data.Add("CONNECT contoso.com", host); } - // port mismatch when target contains port + // port mismatch when target contains default https port data.Add("GET https://contoso.com:443/", "contoso.com:5000"); data.Add("CONNECT contoso.com:443", "contoso.com:5000"); + // port mismatch when target contains default http port + data.Add("GET http://contoso.com:80/", "contoso.com:5000"); + return data; } } diff --git a/src/Servers/Kestrel/test/InMemory.FunctionalTests/BadHttpRequestTests.cs b/src/Servers/Kestrel/test/InMemory.FunctionalTests/BadHttpRequestTests.cs index d7076dacfd4c..af9d93aea3d6 100644 --- a/src/Servers/Kestrel/test/InMemory.FunctionalTests/BadHttpRequestTests.cs +++ 
b/src/Servers/Kestrel/test/InMemory.FunctionalTests/BadHttpRequestTests.cs @@ -153,9 +153,12 @@ public Task BadRequestIfHostHeaderDoesNotMatchRequestTarget(string requestTarget } [Theory] - [InlineData("Host: www.foo.comConnection: keep-alive")] // Corrupted - missing line-break - [InlineData("Host: www.notfoo.com")] // Syntactically correct but not matching - public async Task CanOptOutOfBadRequestIfHostHeaderDoesNotMatchRequestTarget(string hostHeader) + [InlineData("http://www.foo.com", "Host: www.foo.comConnection: keep-alive", "www.foo.com")] // Corrupted - missing line-break + [InlineData("http://www.foo.com/", "Host: www.notfoo.com", "www.foo.com")] // Syntactically correct but not matching + [InlineData("http://www.foo.com:80", "Host: www.notfoo.com", "www.foo.com")] // Explicit default port in request string + [InlineData("http://www.foo.com:5129", "Host: www.foo.com", "www.foo.com:5129")] // Non-default port in request string + [InlineData("http://www.foo.com:5129", "Host: www.foo.com:5128", "www.foo.com:5129")] // Different port in host header + public async Task CanOptOutOfBadRequestIfHostHeaderDoesNotMatchRequestTarget(string requestString, string hostHeader, string expectedHost) { var testMeterFactory = new TestMeterFactory(); using var connectionDuration = new MetricCollector(testMeterFactory, "Microsoft.AspNetCore.Server.Kestrel", "kestrel.connection.duration"); @@ -175,13 +178,13 @@ public async Task CanOptOutOfBadRequestIfHostHeaderDoesNotMatchRequestTarget(str { using (var client = server.CreateConnection()) { - await client.SendAll($"GET http://www.foo.com/api/data HTTP/1.1\r\n{hostHeader}\r\n\r\n"); + await client.SendAll($"GET {requestString} HTTP/1.1\r\n{hostHeader}\r\n\r\n"); await client.Receive("HTTP/1.1 200 OK"); } } - Assert.Equal("www.foo.com:80", receivedHost); + Assert.Equal(expectedHost, receivedHost); Assert.Collection(connectionDuration.GetMeasurementSnapshot(), m => MetricsAssert.NoError(m.Tags)); } diff --git a/src/Servers/Kestrel/test/InMemory.FunctionalTests/ChunkedRequestTests.cs b/src/Servers/Kestrel/test/InMemory.FunctionalTests/ChunkedRequestTests.cs index f5ee2b28228e..7859716d6ff1 100644 --- a/src/Servers/Kestrel/test/InMemory.FunctionalTests/ChunkedRequestTests.cs +++ b/src/Servers/Kestrel/test/InMemory.FunctionalTests/ChunkedRequestTests.cs @@ -4,6 +4,7 @@ using System.Buffers; using System.Globalization; using System.Text; +using Microsoft.AspNetCore.Hosting.Server; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.InternalTesting; using Microsoft.AspNetCore.Server.Kestrel.Core; @@ -18,6 +19,70 @@ namespace Microsoft.AspNetCore.Server.Kestrel.InMemory.FunctionalTests; public class ChunkedRequestTests : LoggedTest { + [Theory] + [InlineData("2;\rxx\r\nxy\r\n0")] // \r in chunk extensions + [InlineData("2;\nxx\r\nxy\r\n0")] // \n in chunk extensions + public async Task RejectsInvalidChunkExtensions(string invalidChunkLine) + { + var testContext = new TestServiceContext(LoggerFactory); + + await using (var server = new TestServer(AppChunked, testContext)) + { + using (var connection = server.CreateConnection()) + { + await connection.Send( + "POST / HTTP/1.1", + "Host:", + "Transfer-Encoding: chunked", + "Content-Type: text/plain", + "", + invalidChunkLine, + "", + ""); + await connection.ReceiveEnd( + "HTTP/1.1 400 Bad Request", + "Content-Length: 0", + "Connection: close", + $"Date: {testContext.DateHeaderValue}", + "", + ""); + } + } + } + + [Theory] + [InlineData("2;a=b;b=c\r\nxy\r\n0")] // Multiple chunk extensions + 
[InlineData("2; \r\nxy\r\n0")] // Space in chunk extensions (BWS) + [InlineData("2;;;\r\nxy\r\n0")] // Multiple ';' in chunk extensions + [InlineData("2;novalue\r\nxy\r\n0")] // Name only chunk extension + //[InlineData("2 ;\r\nxy\r\n0")] // Technically allowed per spec, but we never supported it, and no one should be sending it + public async Task AllowsValidChunkExtensions(string chunkLine) + { + var testContext = new TestServiceContext(LoggerFactory); + + await using (var server = new TestServer(AppChunked, testContext)) + { + using (var connection = server.CreateConnection()) + { + await connection.Send( + "POST / HTTP/1.1", + "Host:", + "Transfer-Encoding: chunked", + "Content-Type: text/plain", + "", + chunkLine, + "", + ""); + await connection.Receive( + "HTTP/1.1 200 OK", + "Content-Length: 2", + $"Date: {testContext.DateHeaderValue}", + "", + "xy"); + } + } + } + private async Task App(HttpContext httpContext) { var request = httpContext.Request; @@ -1117,4 +1182,86 @@ await connection.Receive( } } } + + [Fact] + public async Task MultiReadWithInvalidNewlineAcrossReads() + { + // Inline so that we know when the first connection.Send has been parsed so we can send the next part + var testContext = new TestServiceContext(LoggerFactory) + { Scheduler = System.IO.Pipelines.PipeScheduler.Inline }; + + var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + await using (var server = new TestServer(async httpContext => + { + var request = httpContext.Request; + var readTask = request.BodyReader.ReadAsync(); + tcs.TrySetResult(); + var readResult = await readTask; + request.BodyReader.AdvanceTo(readResult.Buffer.End); + }, testContext)) + { + using (var connection = server.CreateConnection()) + { + await connection.SendAll( + "GET / HTTP/1.1", + "Host:", + "Transfer-Encoding: chunked", + "", + "1;\r"); + await tcs.Task; + await connection.SendAll( + "\r"); + + await connection.ReceiveEnd( + "HTTP/1.1 400 Bad Request", + "Content-Length: 0", + "Connection: close", + $"Date: {testContext.DateHeaderValue}", + "", + ""); + } + } + } + + [Fact] + public async Task InvalidNewlineInFirstReadWithPartialChunkExtension() + { + // Inline so that we know when the first connection.Send has been parsed so we can send the next part + var testContext = new TestServiceContext(LoggerFactory) + { Scheduler = System.IO.Pipelines.PipeScheduler.Inline }; + + var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + await using (var server = new TestServer(async httpContext => + { + var request = httpContext.Request; + var readTask = request.BodyReader.ReadAsync(); + tcs.TrySetResult(); + var readResult = await readTask; + request.BodyReader.AdvanceTo(readResult.Buffer.End); + }, testContext)) + { + using (var connection = server.CreateConnection()) + { + await connection.SendAll( + "GET / HTTP/1.1", + "Host:", + "Transfer-Encoding: chunked", + "", + "1;\n"); + await tcs.Task; + await connection.SendAll( + "t"); + + await connection.ReceiveEnd( + "HTTP/1.1 400 Bad Request", + "Content-Length: 0", + "Connection: close", + $"Date: {testContext.DateHeaderValue}", + "", + ""); + } + } + } } diff --git a/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3ConnectionTests.cs b/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3ConnectionTests.cs index 06d96adc238f..ab8bc5e9a1e6 100644 --- a/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3ConnectionTests.cs +++ 
b/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3ConnectionTests.cs @@ -360,6 +360,35 @@ await Http3Api.WaitForConnectionErrorAsync( MetricsAssert.Equal(ConnectionEndReason.ClosedCriticalStream, Http3Api.ConnectionTags); } + [Theory] + [InlineData((int)Http3FrameType.Settings, 20_000)] + //[InlineData((int)Http3FrameType.GoAway, 30)] // GoAway frames trigger graceful connection close which races with sending FRAME_ERROR + [InlineData((int)Http3FrameType.CancelPush, 30)] + [InlineData((int)Http3FrameType.MaxPushId, 30)] + [InlineData(int.MaxValue, 20_000)] // Unknown frame type + public async Task ControlStream_ClientToServer_LargeFrame_ConnectionError(int frameType, int length) + { + await Http3Api.InitializeConnectionAsync(_noopApplication); + + var controlStream = await Http3Api.CreateControlStream(); + + // Need to send settings frame before other frames, otherwise it's a connection error + if (frameType != (int)Http3FrameType.Settings) + { + await controlStream.SendSettingsAsync(new List()); + } + + await controlStream.SendFrameAsync((Http3FrameType)frameType, new byte[length]); + + await Http3Api.WaitForConnectionErrorAsync( + ignoreNonGoAwayFrames: true, + expectedLastStreamId: 0, + expectedErrorCode: Http3ErrorCode.FrameError, + matchExpectedErrorMessage: AssertExpectedErrorMessages, + expectedErrorMessage: CoreStrings.FormatHttp3ControlStreamFrameTooLarge(Http3Formatting.ToFormattedType((Http3FrameType)frameType))); + MetricsAssert.Equal(ConnectionEndReason.InvalidFrameLength, Http3Api.ConnectionTags); + } + [Fact] public async Task SETTINGS_MaxFieldSectionSizeSent_ServerReceivesValue() { diff --git a/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3StreamTests.cs b/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3StreamTests.cs index 6a44a92e6805..b7697117452e 100644 --- a/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3StreamTests.cs +++ b/src/Servers/Kestrel/test/InMemory.FunctionalTests/Http3/Http3StreamTests.cs @@ -1,7 +1,9 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
+using System.Buffers; using System.Globalization; +using System.IO.Pipelines; using System.Net.Http; using System.Runtime.ExceptionServices; using System.Text; @@ -11,8 +13,8 @@ using Microsoft.AspNetCore.Internal; using Microsoft.AspNetCore.InternalTesting; using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http3; -using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Infrastructure; using Microsoft.AspNetCore.WebUtilities; +using Microsoft.Extensions.Primitives; using Microsoft.Net.Http.Headers; namespace Microsoft.AspNetCore.Server.Kestrel.Core.Tests; @@ -1977,7 +1979,7 @@ public async Task RequestTrailers_CanReadTrailersFromRequest() var trailers = new[] { new KeyValuePair("TestName", "TestValue"), - }; + }; var requestStream = await Http3Api.InitializeConnectionAndStreamsAsync(async c => { await c.Request.Body.DrainAsync(default); @@ -2370,6 +2372,21 @@ public Task HEADERS_Received_HeaderBlockOverLimitx2_ConnectionError() return HEADERS_Received_InvalidHeaderFields_StreamError(headers, CoreStrings.BadRequest_HeadersExceedMaxTotalSize, Http3ErrorCode.RequestRejected); } + [Fact] + public Task HEADERS_Received_HeaderValueOverLimit_ConnectionError() + { + var limit = _serviceContext.ServerOptions.Limits.Http3.MaxRequestHeaderFieldSize; + // Single header value exceeds limit + var headers = new[] + { + new KeyValuePair("a", new string('a', limit + 1)), + }; + + return HEADERS_Received_InvalidHeaderFields_StreamError(headers, + SR.Format(SR.net_http_headers_exceeded_length, limit), + Http3ErrorCode.InternalError); + } + [Fact] public async Task HEADERS_Received_TooManyHeaders_431() { @@ -3000,4 +3017,296 @@ public async Task GetMemory_AfterAbort_GetsFakeMemory(int sizeHint) context.Response.BodyWriter.Advance(memory.Length); }, headers); } + + [Fact] + public async Task ControlStream_CloseBeforeSendingSettings() + { + await Http3Api.InitializeConnectionAsync(_noopApplication); + + var outboundcontrolStream = await Http3Api.CreateControlStream(); + + await outboundcontrolStream.EndStreamAsync(); + + await outboundcontrolStream.ReceiveEndAsync(); + } + + [Fact] + public async Task ControlStream_PartialFrameThenClose() + { + await Http3Api.InitializeConnectionAsync(_noopApplication); + + var outboundcontrolStream = await Http3Api.CreateControlStream(); + + var settings = new List + { + new Http3PeerSetting(Internal.Http3.Http3SettingType.MaxFieldSectionSize, 100), + new Http3PeerSetting(Internal.Http3.Http3SettingType.EnableWebTransport, 1), + new Http3PeerSetting(Internal.Http3.Http3SettingType.H3Datagram, 1) + }; + var len = Http3FrameWriter.CalculateSettingsSize(settings); + + Http3FrameWriter.WriteHeader(Http3FrameType.Settings, len, outboundcontrolStream.Pair.Application.Output); + + var parameterLength = VariableLengthIntegerHelper.WriteInteger(outboundcontrolStream.Pair.Application.Output.GetSpan(), (long)Internal.Http3.Http3SettingType.MaxFieldSectionSize); + outboundcontrolStream.Pair.Application.Output.Advance(parameterLength); + await outboundcontrolStream.Pair.Application.Output.FlushAsync(); + + await outboundcontrolStream.EndStreamAsync(); + + await outboundcontrolStream.ReceiveEndAsync(); + } + + [Fact] + public async Task SendDataObservesBackpressureFromApp() + { + var headers = new[] + { + new KeyValuePair(InternalHeaderNames.Method, "Custom"), + new KeyValuePair(InternalHeaderNames.Path, "/"), + new KeyValuePair(InternalHeaderNames.Scheme, "http"), + new KeyValuePair(InternalHeaderNames.Authority, "localhost:80"), + }; + + // Http3Stream hardcodes a 64k size for the 
RequestBodyPipe there is also the transport Pipe which we can influence with MaxRequestBufferSize + // So we need to send enough to fill up the 64k Pipe as well as the 100 byte Pipe. + var sendSize = 1024 * 65; + _serviceContext.ServerOptions.Limits.MaxRequestBufferSize = 100; + var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + var startedReadingTcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + var requestStream = await Http3Api.InitializeConnectionAndStreamsAsync(async c => + { + // Read a single byte to make sure data has gotten here before we start verifying backpressure in the test code + var res = await c.Request.BodyReader.ReadAsync(); + Assert.Equal(sendSize, res.Buffer.Length); + c.Request.BodyReader.AdvanceTo(res.Buffer.Slice(1).Start); + startedReadingTcs.SetResult(); + + await tcs.Task; + res = await c.Request.BodyReader.ReadAsync(); + Assert.Equal(sendSize - 1, res.Buffer.Length); + c.Request.BodyReader.AdvanceTo(res.Buffer.End); + }, headers); + + var sendTask = requestStream.SendDataAsync(Encoding.ASCII.GetBytes(new string('a', sendSize))); + + // Wait for "app" code to start reading to ensure it has gotten bytes before we start verifying backpressure + await startedReadingTcs.Task; + Assert.False(sendTask.IsCompleted); + tcs.SetResult(); + + await sendTask; + + var responseHeaders = await requestStream.ExpectHeadersAsync(); + Assert.Equal("200", responseHeaders[InternalHeaderNames.Status]); + + await requestStream.ExpectReceiveEndOfStream(); + } + + [Fact] + public async Task Request_FrameParsingSingleByteAtATimeWorks() + { + var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + var total = 0; + var trailerValue = string.Empty; + await Http3Api.InitializeConnectionAsync(async context => + { + var buffer = new byte[100]; + var read = await context.Request.Body.ReadAsync(buffer, 0, buffer.Length); + var captureTcs = tcs; + tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + captureTcs.SetResult(); + Assert.Equal(1, read); + total = read; + while (read > 0) + { + read = await context.Request.Body.ReadAsync(buffer, total, buffer.Length - total); + captureTcs = tcs; + tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + captureTcs.SetResult(); + total += read; + if (read == 0) + { + break; + } + Assert.Equal(1, read); + } + + trailerValue = context.Request.GetTrailer("TestName"); + }); + + // Use Inline scheduling and buffer size of 1 to guarantee each write will wait for the parsing loop to complete before writing more data + _serviceContext.ServerOptions.Limits.MaxRequestBufferSize = 1; + var stream = await Http3Api.CreateRequestStream(headers: [], clientWriterScheduler: PipeScheduler.Inline); + + // Use local pipe to write frames so we can get the entire buffer in order to write it one byte at a time + var bufferPipe = new Pipe(); + Http3FrameWriter.WriteHeader(Http3FrameType.Headers, frameLength: 38, bufferPipe.Writer); + + var headersTotalSize = 0; + var headers = new Http3HeadersEnumerator(); + headers.Initialize(new Dictionary() { + { InternalHeaderNames.Method, "POST" }, + { InternalHeaderNames.Path, "/" }, + { InternalHeaderNames.Scheme, "http" }, }); + + var mem = bufferPipe.Writer.GetMemory(); + var done = QPackHeaderWriter.BeginEncodeHeaders(headers, mem.Span, ref headersTotalSize, out var length); + Assert.True(done); + bufferPipe.Writer.Advance(length); + await 
bufferPipe.Writer.FlushAsync(); + + // Write header frame one byte at a time + await WriteOneByteAtATime(bufferPipe.Reader, stream.Pair.Application.Output); + + Http3FrameWriter.WriteHeader(Http3FrameType.Data, frameLength: 12, bufferPipe.Writer); + await bufferPipe.Writer.FlushAsync(); + + // Write data header one byte at a time + await WriteOneByteAtATime(bufferPipe.Reader, stream.Pair.Application.Output); + + bufferPipe.Writer.Write(new byte[12]); + await bufferPipe.Writer.FlushAsync(); + + // Write data in data frame one byte at a time + // Don't use WriteOneByteAtATime() as we want to wait on the TCS after every flush to make sure app code consumed the data + // before we send another byte + var res = await bufferPipe.Reader.ReadAsync(); + for (var i = 0; i < res.Buffer.Length; i++) + { + mem = stream.Pair.Application.Output.GetMemory(); + mem.Span[0] = res.Buffer.Slice(i).FirstSpan[0]; + stream.Pair.Application.Output.Advance(1); + // Use TCS to make sure app can read data before we send more + var capturedTcs = tcs; + await stream.Pair.Application.Output.FlushAsync(); + await capturedTcs.Task; + } + bufferPipe.Reader.AdvanceTo(res.Buffer.End); + + var trailers = new Http3HeadersEnumerator(); + trailers.Initialize(new Dictionary() + { + { "TestName", "TestValue" } + }); + + Http3FrameWriter.WriteHeader(Http3FrameType.Headers, frameLength: 22, bufferPipe.Writer); + mem = bufferPipe.Writer.GetMemory(); + done = QPackHeaderWriter.BeginEncodeHeaders(trailers, mem.Span, ref headersTotalSize, out length); + Assert.True(done); + bufferPipe.Writer.Advance(length); + await bufferPipe.Writer.FlushAsync(); + + // Write trailer frame one byte at a time + await WriteOneByteAtATime(bufferPipe.Reader, stream.Pair.Application.Output); + + await stream.EndStreamAsync(); + + var responseHeaders = await stream.ExpectHeadersAsync(); + Assert.Equal(3, responseHeaders.Count); + Assert.Contains("date", responseHeaders.Keys, StringComparer.OrdinalIgnoreCase); + Assert.Equal("200", responseHeaders[InternalHeaderNames.Status]); + Assert.Equal("0", responseHeaders["content-length"]); + + await stream.ExpectReceiveEndOfStream(); + + Assert.Equal(12, total); + Assert.Equal("TestValue", trailerValue); + } + + [Fact] + public async Task Control_FrameParsingSingleByteAtATimeWorks() + { + await Http3Api.InitializeConnectionAsync(_noopApplication); + + // Use Inline scheduling and buffer size of 1 to guarantee each write will wait for the parsing loop to complete before writing more data + _serviceContext.ServerOptions.Limits.MaxRequestBufferSize = 1; + var outboundcontrolStream = await Http3Api.CreateControlStream(clientWriterScheduler: PipeScheduler.Inline); + + // Use local pipe to write frames so we can get the entire buffer in order to write it one byte at a time + var bufferPipe = new Pipe(); + + var settings = new List + { + new Http3PeerSetting(Internal.Http3.Http3SettingType.MaxFieldSectionSize, 100), + new Http3PeerSetting(Internal.Http3.Http3SettingType.EnableWebTransport, 1), + new Http3PeerSetting(Internal.Http3.Http3SettingType.H3Datagram, 1) + }; + var len = Http3FrameWriter.CalculateSettingsSize(settings); + + Http3FrameWriter.WriteHeader(Http3FrameType.Settings, len, bufferPipe.Writer); + var mem = bufferPipe.Writer.GetMemory(); + Http3FrameWriter.WriteSettings(settings, mem.Span); + + bufferPipe.Writer.Advance(len); + await bufferPipe.Writer.FlushAsync(); + + // Write Settings frame one byte at a time + await WriteOneByteAtATime(bufferPipe.Reader, outboundcontrolStream.Pair.Application.Output); + + 
var fieldSetting = await Http3Api.ServerReceivedSettingsReader.ReadAsync().DefaultTimeout(); + + Assert.Equal(Internal.Http3.Http3SettingType.MaxFieldSectionSize, fieldSetting.Key); + Assert.Equal(100, fieldSetting.Value); + + fieldSetting = await Http3Api.ServerReceivedSettingsReader.ReadAsync().DefaultTimeout(); + Assert.Equal(Internal.Http3.Http3SettingType.EnableWebTransport, fieldSetting.Key); + Assert.Equal(1, fieldSetting.Value); + + fieldSetting = await Http3Api.ServerReceivedSettingsReader.ReadAsync().DefaultTimeout(); + Assert.Equal(Internal.Http3.Http3SettingType.H3Datagram, fieldSetting.Key); + Assert.Equal(1, fieldSetting.Value); + + // Frames must be well-formed otherwise we close the connection with a frame error + Http3FrameWriter.WriteHeader(Http3FrameType.CancelPush, frameLength: 2, bufferPipe.Writer); + var idLength = VariableLengthIntegerHelper.WriteInteger(bufferPipe.Writer.GetSpan(), longToEncode: 1026); + bufferPipe.Writer.Advance(idLength); + await bufferPipe.Writer.FlushAsync(); + + // Write CancelPush frame one byte at a time + await WriteOneByteAtATime(bufferPipe.Reader, outboundcontrolStream.Pair.Application.Output); + + // Frames must be well-formed otherwise we close the connection with a frame error + Http3FrameWriter.WriteHeader(Http3FrameType.GoAway, frameLength: 4, bufferPipe.Writer); + idLength = VariableLengthIntegerHelper.WriteInteger(bufferPipe.Writer.GetSpan(), longToEncode: 100026); + bufferPipe.Writer.Advance(idLength); + await bufferPipe.Writer.FlushAsync(); + + try + { + // Write GoAway frame one byte at a time + await WriteOneByteAtATime(bufferPipe.Reader, outboundcontrolStream.Pair.Application.Output); + } + // As soon as the GOAWAY frame identifier is processed we initiate the connection close process. + // That means it's possible to still be writing to the stream when we close the + // connection which would result in an exception. We'll just ignore the exception in this case. + catch (Exception) { } + + await outboundcontrolStream.EndStreamAsync(); + + // Check that connection is closed. + var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + Http3Api.MultiplexedConnectionContext.ConnectionClosed.Register(() => tcs.TrySetResult()); + await tcs.Task; + + await outboundcontrolStream.ReceiveEndAsync(); + } + + private async Task WriteOneByteAtATime(PipeReader reader, PipeWriter writer) + { + var res = await reader.ReadAsync(); + try + { + for (var i = 0; i < res.Buffer.Length; i++) + { + var mem = writer.GetMemory(); + mem.Span[0] = res.Buffer.Slice(i).FirstSpan[0]; + writer.Advance(1); + await writer.FlushAsync(); + } + } + finally + { + reader.AdvanceTo(res.Buffer.End); + } + } } diff --git a/src/Shared/CertificateGeneration/UnixCertificateManager.cs b/src/Shared/CertificateGeneration/UnixCertificateManager.cs index c583c4d370ed..9212fc475cf7 100644 --- a/src/Shared/CertificateGeneration/UnixCertificateManager.cs +++ b/src/Shared/CertificateGeneration/UnixCertificateManager.cs @@ -62,18 +62,32 @@ public override TrustLevel GetTrustLevel(X509Certificate2 certificate) // Building the chain will check whether dotnet trusts the cert. We could, instead, // enumerate the Root store and/or look for the file in the OpenSSL directory, but // this tests the real-world behavior. 
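The change that follows wraps the chain build in try/finally because disposing an X509Chain does not dispose the certificates it materialized in ChainElements. A condensed, hedged sketch of the same pattern, with the surrounding dev-cert logic and logging omitted:

using System.Security.Cryptography.X509Certificates;

static class DevCertTrustSketch
{
    public static bool IsTrustedByDotnet(X509Certificate2 certificate)
    {
        var chain = new X509Chain();
        try
        {
            // This is only a heuristic, so skip revocation checking.
            chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
            return chain.Build(certificate);
        }
        finally
        {
            // X509Chain.Dispose() does not dispose the elements it built; walk them manually.
            for (var i = 0; i < chain.ChainElements.Count; i++)
            {
                chain.ChainElements[i].Certificate.Dispose();
            }

            chain.Dispose();
        }
    }
}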
- using var chain = new X509Chain(); - // This is just a heuristic for whether or not we should prompt the user to re-run with `--trust` - // so we don't need to check revocation (which doesn't really make sense for dev certs anyway) - chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; - if (chain.Build(certificate)) + var chain = new X509Chain(); + try { - sawTrustSuccess = true; + // This is just a heuristic for whether or not we should prompt the user to re-run with `--trust` + // so we don't need to check revocation (which doesn't really make sense for dev certs anyway) + chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; + if (chain.Build(certificate)) + { + sawTrustSuccess = true; + } + else + { + sawTrustFailure = true; + Log.UnixNotTrustedByDotnet(); + } } - else + finally { - sawTrustFailure = true; - Log.UnixNotTrustedByDotnet(); + // Disposing the chain does not dispose the elements we potentially built. + // Do the full walk manually to dispose. + for (var i = 0; i < chain.ChainElements.Count; i++) + { + chain.ChainElements[i].Certificate.Dispose(); + } + + chain.Dispose(); } // Will become the name of the file on disk and the nickname in the NSS DBs @@ -94,7 +108,7 @@ public override TrustLevel GetTrustLevel(X509Certificate2 certificate) var certPath = Path.Combine(sslCertDir, certificateNickname + ".pem"); if (File.Exists(certPath)) { - var candidate = X509CertificateLoader.LoadCertificateFromFile(certPath); + using var candidate = X509CertificateLoader.LoadCertificateFromFile(certPath); if (AreCertificatesEqual(certificate, candidate)) { foundCert = true; diff --git a/src/Shared/ThrowHelpers/ArgumentNullThrowHelper.cs b/src/Shared/ThrowHelpers/ArgumentNullThrowHelper.cs index fc1d5c847d74..e83e87423745 100644 --- a/src/Shared/ThrowHelpers/ArgumentNullThrowHelper.cs +++ b/src/Shared/ThrowHelpers/ArgumentNullThrowHelper.cs @@ -30,6 +30,29 @@ public static void ThrowIfNull( #endif } + /// Throws an if is null or empty. + /// The argument to validate as non-null and non-empty. + /// The name of the parameter with which corresponds. + public static void ThrowIfNullOrEmpty( +#if INTERNAL_NULLABLE_ATTRIBUTES || NETSTANDARD2_1_OR_GREATER || NET5_0_OR_GREATER + [NotNull] +#endif + string? argument, [CallerArgumentExpression(nameof(argument))] string? paramName = null) + { +#if !NET7_0_OR_GREATER || NETSTANDARD || NETFRAMEWORK + if (argument is null) + { + Throw(paramName); + } + else if (argument.Length == 0) + { + throw new ArgumentException("Must not be null or empty", paramName); + } +#else + ArgumentException.ThrowIfNullOrEmpty(argument, paramName); +#endif + } + #if !NET7_0_OR_GREATER || NETSTANDARD || NETFRAMEWORK #if INTERNAL_NULLABLE_ATTRIBUTES || NETSTANDARD2_1_OR_GREATER || NET5_0_OR_GREATER [DoesNotReturn] diff --git a/src/Shared/runtime/Http3/Helpers/VariableLengthIntegerHelper.cs b/src/Shared/runtime/Http3/Helpers/VariableLengthIntegerHelper.cs index 3a343a62a4cc..c7f1ec908d0f 100644 --- a/src/Shared/runtime/Http3/Helpers/VariableLengthIntegerHelper.cs +++ b/src/Shared/runtime/Http3/Helpers/VariableLengthIntegerHelper.cs @@ -128,19 +128,19 @@ static bool TryReadSlow(ref SequenceReader reader, out long value) } } - public static long GetInteger(in ReadOnlySequence buffer, out SequencePosition consumed, out SequencePosition examined) + // If callsite has 'examined', set it to buffer.End if the integer wasn't successfully read, otherwise set examined = consumed. 
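To make the convention in that comment concrete, here is a hedged sketch of a hypothetical caller of the new TryGetInteger: on success it treats consumed as examined, and on failure it marks the whole buffer examined so the read loop waits for more data. The method name and shape are assumptions for illustration; only the TryGetInteger signature comes from this change.

using System;
using System.Buffers;

static class VarIntCallerSketch
{
    public static bool TryReadStreamId(in ReadOnlySequence<byte> buffer,
        out SequencePosition consumed, out SequencePosition examined, out long id)
    {
        if (VariableLengthIntegerHelper.TryGetInteger(buffer, out consumed, out id))
        {
            // Successfully decoded a variable-length integer; examined tracks consumed.
            examined = consumed;
            return true;
        }

        // Not enough bytes yet; examine everything so the pipe asks for more data.
        examined = buffer.End;
        return false;
    }
}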
+ public static bool TryGetInteger(in ReadOnlySequence buffer, out SequencePosition consumed, out long integer) { var reader = new SequenceReader(buffer); - if (TryRead(ref reader, out long value)) + if (TryRead(ref reader, out integer)) { - consumed = examined = buffer.GetPosition(reader.Consumed); - return value; + consumed = buffer.GetPosition(reader.Consumed); + return true; } else { - consumed = default; - examined = buffer.End; - return -1; + consumed = buffer.Start; + return false; } } diff --git a/src/Shared/test/Shared.Tests/runtime/Http3/VariableLengthIntegerHelperTests.cs b/src/Shared/test/Shared.Tests/runtime/Http3/VariableLengthIntegerHelperTests.cs index d67d24c0ba25..e461bfd41ed4 100644 --- a/src/Shared/test/Shared.Tests/runtime/Http3/VariableLengthIntegerHelperTests.cs +++ b/src/Shared/test/Shared.Tests/runtime/Http3/VariableLengthIntegerHelperTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. using System; @@ -223,12 +223,12 @@ public void GetInteger_ValidSegmentedSequence() MemorySegment memorySegment2 = memorySegment1.Append(new byte[] { 0, 0, 0, 0, 0, 0, 2 }); ReadOnlySequence readOnlySequence = new ReadOnlySequence( memorySegment1, 0, memorySegment2, memorySegment2.Memory.Length); - long result = VariableLengthIntegerHelper.GetInteger(readOnlySequence, - out SequencePosition consumed, out SequencePosition examined); + bool result = VariableLengthIntegerHelper.TryGetInteger(readOnlySequence, + out SequencePosition consumed, out long integer); - Assert.Equal(2, result); + Assert.True(result); + Assert.Equal(2, integer); Assert.Equal(7, consumed.GetInteger()); - Assert.Equal(7, examined.GetInteger()); } [Fact] @@ -238,12 +238,11 @@ public void GetInteger_NotValidSegmentedSequence() MemorySegment memorySegment2 = memorySegment1.Append(new byte[] { 0, 0, 0, 0, 0, 2 }); ReadOnlySequence readOnlySequence = new ReadOnlySequence( memorySegment1, 0, memorySegment2, memorySegment2.Memory.Length); - long result = VariableLengthIntegerHelper.GetInteger(readOnlySequence, - out SequencePosition consumed, out SequencePosition examined); + bool result = VariableLengthIntegerHelper.TryGetInteger(readOnlySequence, + out SequencePosition consumed, out long integer); - Assert.Equal(-1, result); + Assert.False(result); Assert.Equal(0, consumed.GetInteger()); - Assert.Equal(6, examined.GetInteger()); } [Fact] diff --git a/src/SignalR/clients/java/signalr/build.gradle b/src/SignalR/clients/java/signalr/build.gradle index 895f8c4338d3..3e192445c97e 100644 --- a/src/SignalR/clients/java/signalr/build.gradle +++ b/src/SignalR/clients/java/signalr/build.gradle @@ -22,7 +22,7 @@ allprojects { version project.findProperty('packageVersion') ?: "99.99.99-dev" java { - sourceCompatibility = 1.8 + sourceCompatibility = 1.9 } repositories { diff --git a/src/SignalR/clients/java/signalr/core/src/main/java/com/microsoft/signalr/GsonHubProtocol.java b/src/SignalR/clients/java/signalr/core/src/main/java/com/microsoft/signalr/GsonHubProtocol.java index 042ca484806f..4b0c8848e816 100644 --- a/src/SignalR/clients/java/signalr/core/src/main/java/com/microsoft/signalr/GsonHubProtocol.java +++ b/src/SignalR/clients/java/signalr/core/src/main/java/com/microsoft/signalr/GsonHubProtocol.java @@ -126,7 +126,14 @@ public List parseMessages(ByteBuffer payload, InvocationBinder binde } break; case "headers": - throw new RuntimeException("Headers 
not implemented yet."); + // Parse headers as Map but don't store for now as it's unused + reader.beginObject(); + while (reader.hasNext()) { + reader.nextName(); // Read the key + reader.nextString(); // Read the value + } + reader.endObject(); + break; default: // Skip unknown property, allows new clients to still work with old protocols reader.skipValue(); diff --git a/src/SignalR/clients/java/signalr/test/signalr.client.java.Tests.javaproj b/src/SignalR/clients/java/signalr/test/signalr.client.java.Tests.javaproj index 823c53ae8a72..8068629f03b3 100644 --- a/src/SignalR/clients/java/signalr/test/signalr.client.java.Tests.javaproj +++ b/src/SignalR/clients/java/signalr/test/signalr.client.java.Tests.javaproj @@ -6,6 +6,8 @@ true true + + OSX.15.Amd64.Open;$(SkipHelixQueues) $(OutputPath) true diff --git a/src/SignalR/clients/java/signalr/test/src/main/java/com/microsoft/signalr/GsonHubProtocolTest.java b/src/SignalR/clients/java/signalr/test/src/main/java/com/microsoft/signalr/GsonHubProtocolTest.java index 53454be031b6..2ecfd483c7f5 100644 --- a/src/SignalR/clients/java/signalr/test/src/main/java/com/microsoft/signalr/GsonHubProtocolTest.java +++ b/src/SignalR/clients/java/signalr/test/src/main/java/com/microsoft/signalr/GsonHubProtocolTest.java @@ -444,7 +444,7 @@ public void invocationBindingFailureWhenParsingLocalDateTimeWithoutAppropriateTy assertEquals(HubMessageType.INVOCATION_BINDING_FAILURE, message.getMessageType()); InvocationBindingFailureMessage failureMessage = (InvocationBindingFailureMessage) messages.get(0); - assertEquals("java.lang.IllegalStateException: Expected BEGIN_OBJECT but was STRING at line 1 column 41 path $.arguments[0]", failureMessage.getException().getMessage()); + assertEquals("com.google.gson.JsonSyntaxException", failureMessage.getException().getClass().getName()); } @Test @@ -527,4 +527,98 @@ public void canRegisterTypeAdaptorWithoutAffectingJsonProtocol() { assertEquals(3, (int) invocationMessage.getArguments()[0]); assertEquals("four", invocationMessage.getArguments()[1]); } + + @Test + public void canParseInvocationMessageWithHeaders() { + String stringifiedMessage = "{\"type\":1,\"target\":\"test\",\"arguments\":[42],\"headers\":{\"a\":\"b\",\"c\":\"d\"}}\u001E"; + ByteBuffer message = TestUtils.stringToByteBuffer(stringifiedMessage); + TestBinder binder = new TestBinder(new Type[] { int.class }, null); + + List messages = hubProtocol.parseMessages(message, binder); + + assertNotNull(messages); + assertEquals(1, messages.size()); + + assertEquals(HubMessageType.INVOCATION, messages.get(0).getMessageType()); + InvocationMessage invocationMessage = (InvocationMessage) messages.get(0); + + assertEquals("test", invocationMessage.getTarget()); + assertEquals(null, invocationMessage.getInvocationId()); + int messageResult = (int)invocationMessage.getArguments()[0]; + assertEquals(42, messageResult); + // Headers are parsed but not stored, so we just verify the message was processed successfully + } + + @Test + public void canParseInvocationMessageWithEmptyHeaders() { + String stringifiedMessage = "{\"type\":1,\"target\":\"test\",\"arguments\":[42],\"headers\":{}}\u001E"; + ByteBuffer message = TestUtils.stringToByteBuffer(stringifiedMessage); + TestBinder binder = new TestBinder(new Type[] { int.class }, null); + + List messages = hubProtocol.parseMessages(message, binder); + + assertNotNull(messages); + assertEquals(1, messages.size()); + + assertEquals(HubMessageType.INVOCATION, messages.get(0).getMessageType()); + InvocationMessage invocationMessage 
= (InvocationMessage) messages.get(0); + + assertEquals("test", invocationMessage.getTarget()); + int messageResult = (int)invocationMessage.getArguments()[0]; + assertEquals(42, messageResult); + } + + @Test + public void canParseCompletionMessageWithHeaders() { + String stringifiedMessage = "{\"type\":3,\"invocationId\":\"1\",\"result\":42,\"headers\":{\"a\":\"b\",\"c\":\"d\"}}\u001E"; + ByteBuffer message = TestUtils.stringToByteBuffer(stringifiedMessage); + TestBinder binder = new TestBinder(null, int.class); + + List messages = hubProtocol.parseMessages(message, binder); + + assertNotNull(messages); + assertEquals(1, messages.size()); + + assertEquals(HubMessageType.COMPLETION, messages.get(0).getMessageType()); + CompletionMessage completionMessage = (CompletionMessage) messages.get(0); + assertEquals("1", completionMessage.getInvocationId()); + assertEquals(42, completionMessage.getResult()); + assertEquals(null, completionMessage.getError()); + } + + @Test + public void canParseStreamItemMessageWithHeaders() { + String stringifiedMessage = "{\"type\":2,\"invocationId\":\"1\",\"item\":\"test-item\",\"headers\":{\"a\":\"b\"}}\u001E"; + ByteBuffer message = TestUtils.stringToByteBuffer(stringifiedMessage); + TestBinder binder = new TestBinder(null, String.class); + + List messages = hubProtocol.parseMessages(message, binder); + + assertNotNull(messages); + assertEquals(1, messages.size()); + + assertEquals(HubMessageType.STREAM_ITEM, messages.get(0).getMessageType()); + StreamItem streamItem = (StreamItem) messages.get(0); + assertEquals("1", streamItem.getInvocationId()); + assertEquals("test-item", streamItem.getItem()); + } + + @Test + public void canParseMessageWithHeadersInDifferentOrder() { + String stringifiedMessage = "{\"headers\":{\"First\":\"value1\",\"Second\":\"value2\"},\"type\":1,\"target\":\"test\",\"arguments\":[42]}\u001E"; + ByteBuffer message = TestUtils.stringToByteBuffer(stringifiedMessage); + TestBinder binder = new TestBinder(new Type[] { int.class }, null); + + List messages = hubProtocol.parseMessages(message, binder); + + assertNotNull(messages); + assertEquals(1, messages.size()); + + assertEquals(HubMessageType.INVOCATION, messages.get(0).getMessageType()); + InvocationMessage invocationMessage = (InvocationMessage) messages.get(0); + + assertEquals("test", invocationMessage.getTarget()); + int messageResult = (int)invocationMessage.getArguments()[0]; + assertEquals(42, messageResult); + } } diff --git a/src/SignalR/common/Shared/MessageBuffer.cs b/src/SignalR/common/Shared/MessageBuffer.cs index 17b9ae170fe0..f08fff86aa40 100644 --- a/src/SignalR/common/Shared/MessageBuffer.cs +++ b/src/SignalR/common/Shared/MessageBuffer.cs @@ -121,15 +121,16 @@ private async Task RunTimer() public ValueTask WriteAsync(SerializedHubMessage hubMessage, CancellationToken cancellationToken) { - return WriteAsyncCore(hubMessage.Message!, hubMessage.GetSerializedMessage(_protocol), cancellationToken); + // Default to HubInvocationMessage as that's the only type we use SerializedHubMessage for currently when Message is null. Should harden this in the future. + return WriteAsyncCore(hubMessage.Message?.GetType() ?? 
typeof(HubInvocationMessage), hubMessage.GetSerializedMessage(_protocol), cancellationToken); } public ValueTask WriteAsync(HubMessage hubMessage, CancellationToken cancellationToken) { - return WriteAsyncCore(hubMessage, _protocol.GetMessageBytes(hubMessage), cancellationToken); + return WriteAsyncCore(hubMessage.GetType(), _protocol.GetMessageBytes(hubMessage), cancellationToken); } - private async ValueTask WriteAsyncCore(HubMessage hubMessage, ReadOnlyMemory messageBytes, CancellationToken cancellationToken) + private async ValueTask WriteAsyncCore(Type hubMessageType, ReadOnlyMemory messageBytes, CancellationToken cancellationToken) { // TODO: Add backpressure based on message count if (_bufferedByteCount > _bufferLimit) @@ -158,7 +159,7 @@ private async ValueTask WriteAsyncCore(HubMessage hubMessage, ReadO await _writeLock.WaitAsync(cancellationToken: default).ConfigureAwait(false); try { - if (hubMessage is HubInvocationMessage invocationMessage) + if (typeof(HubInvocationMessage).IsAssignableFrom(hubMessageType)) { _totalMessageCount++; _bufferedByteCount += messageBytes.Length; diff --git a/src/SignalR/common/Shared/Utf8BufferTextWriter.cs b/src/SignalR/common/Shared/Utf8BufferTextWriter.cs index 6c993f11be7a..f86432af249a 100644 --- a/src/SignalR/common/Shared/Utf8BufferTextWriter.cs +++ b/src/SignalR/common/Shared/Utf8BufferTextWriter.cs @@ -35,6 +35,12 @@ public Utf8BufferTextWriter() _encoder = _utf8NoBom.GetEncoder(); } + public Utf8BufferTextWriter(IFormatProvider formatProvider) + : base(formatProvider) + { + _encoder = _utf8NoBom.GetEncoder(); + } + public static Utf8BufferTextWriter Get(IBufferWriter bufferWriter) { var writer = _cachedInstance; diff --git a/src/SignalR/server/Core/src/SerializedHubMessage.cs b/src/SignalR/server/Core/src/SerializedHubMessage.cs index e355b0329128..9f4327a4cc58 100644 --- a/src/SignalR/server/Core/src/SerializedHubMessage.cs +++ b/src/SignalR/server/Core/src/SerializedHubMessage.cs @@ -1,6 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Diagnostics; using Microsoft.AspNetCore.SignalR.Protocol; namespace Microsoft.AspNetCore.SignalR; @@ -40,6 +41,8 @@ public SerializedHubMessage(IReadOnlyList messages) /// The hub message for the cache. This will be serialized with an in to get the message's serialized representation. public SerializedHubMessage(HubMessage message) { + // Type currently only used for invocation messages, we should probably refactor it to be explicit about that e.g. new property for message type? + Debug.Assert(message.GetType().IsAssignableTo(typeof(HubInvocationMessage))); Message = message; } diff --git a/src/SignalR/server/SignalR/test/Microsoft.AspNetCore.SignalR.Tests/Internal/MessageBufferTests.cs b/src/SignalR/server/SignalR/test/Microsoft.AspNetCore.SignalR.Tests/Internal/MessageBufferTests.cs index a7e87d2b8bda..540ea462e199 100644 --- a/src/SignalR/server/SignalR/test/Microsoft.AspNetCore.SignalR.Tests/Internal/MessageBufferTests.cs +++ b/src/SignalR/server/SignalR/test/Microsoft.AspNetCore.SignalR.Tests/Internal/MessageBufferTests.cs @@ -2,11 +2,12 @@ // The .NET Foundation licenses this file to you under the MIT license. 
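// A minimal sketch of the buffering decision from the MessageBuffer.WriteAsyncCore change
// above (illustrative names, not the repo's API): once the method receives only a Type, the
// "is HubInvocationMessage" pattern match becomes an IsAssignableFrom check, and a
// SerializedHubMessage whose Message is null falls back to typeof(HubInvocationMessage) so it
// is still counted and buffered for resend.
using System;
using Microsoft.AspNetCore.SignalR.Protocol;

static class MessageBufferSketch
{
    public static bool ShouldTrackForAck(Type hubMessageType) =>
        typeof(HubInvocationMessage).IsAssignableFrom(hubMessageType);
}

// e.g. ShouldTrackForAck(typeof(InvocationMessage)) is true, while
//      ShouldTrackForAck(typeof(PingMessage)) is false.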
using System.IO.Pipelines; +using System.Text.Json; using Microsoft.AspNetCore.Connections; using Microsoft.AspNetCore.Http.Features; +using Microsoft.AspNetCore.InternalTesting; using Microsoft.AspNetCore.SignalR.Internal; using Microsoft.AspNetCore.SignalR.Protocol; -using Microsoft.AspNetCore.InternalTesting; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Time.Testing; @@ -169,6 +170,62 @@ public async Task UnAckedMessageResentOnReconnect() Assert.False(messageBuffer.ShouldProcessMessage(CompletionMessage.WithResult("1", null))); } + // Regression test for https://github.com/dotnet/aspnetcore/issues/55575 + [Fact] + public async Task UnAckedSerializedMessageResentOnReconnect() + { + var protocol = new JsonHubProtocol(); + var connection = new TestConnectionContext(); + var pipes = DuplexPipe.CreateConnectionPair(new PipeOptions(), new PipeOptions()); + connection.Transport = pipes.Transport; + using var messageBuffer = new MessageBuffer(connection, protocol, bufferLimit: 1000, NullLogger.Instance); + + var invocationMessage = new SerializedHubMessage([new SerializedMessage(protocol.Name, + protocol.GetMessageBytes(new InvocationMessage("method1", [1])))]); + await messageBuffer.WriteAsync(invocationMessage, default); + + var res = await pipes.Application.Input.ReadAsync(); + + var buffer = res.Buffer; + Assert.True(protocol.TryParseMessage(ref buffer, new TestBinder(), out var message)); + var parsedMessage = Assert.IsType(message); + Assert.Equal("method1", parsedMessage.Target); + Assert.Equal(1, ((JsonElement)Assert.Single(parsedMessage.Arguments)).GetInt32()); + + pipes.Application.Input.AdvanceTo(buffer.Start); + + DuplexPipe.UpdateConnectionPair(ref pipes, connection); + await messageBuffer.ResendAsync(pipes.Transport.Output); + + Assert.True(messageBuffer.ShouldProcessMessage(PingMessage.Instance)); + Assert.True(messageBuffer.ShouldProcessMessage(CompletionMessage.WithResult("1", null))); + Assert.True(messageBuffer.ShouldProcessMessage(new SequenceMessage(1))); + + res = await pipes.Application.Input.ReadAsync(); + + buffer = res.Buffer; + Assert.True(protocol.TryParseMessage(ref buffer, new TestBinder(), out message)); + var seqMessage = Assert.IsType(message); + Assert.Equal(1, seqMessage.SequenceId); + + pipes.Application.Input.AdvanceTo(buffer.Start); + + res = await pipes.Application.Input.ReadAsync(); + + buffer = res.Buffer; + Assert.True(protocol.TryParseMessage(ref buffer, new TestBinder(), out message)); + parsedMessage = Assert.IsType(message); + Assert.Equal("method1", parsedMessage.Target); + Assert.Equal(1, ((JsonElement)Assert.Single(parsedMessage.Arguments)).GetInt32()); + + pipes.Application.Input.AdvanceTo(buffer.Start); + + messageBuffer.ShouldProcessMessage(new SequenceMessage(1)); + + Assert.True(messageBuffer.ShouldProcessMessage(PingMessage.Instance)); + Assert.False(messageBuffer.ShouldProcessMessage(CompletionMessage.WithResult("1", null))); + } + [Fact] public async Task AckedMessageNotResentOnReconnect() { diff --git a/src/SignalR/server/StackExchangeRedis/test/Docker.cs b/src/SignalR/server/StackExchangeRedis/test/Docker.cs index 76fa6440e672..41315734daea 100644 --- a/src/SignalR/server/StackExchangeRedis/test/Docker.cs +++ b/src/SignalR/server/StackExchangeRedis/test/Docker.cs @@ -16,7 +16,8 @@ public class Docker { private static readonly string _exeSuffix = OperatingSystem.IsWindows() ? 
".exe" : string.Empty; - private static readonly string _dockerContainerName = "redisTestContainer"; + private static readonly string _redisImageName = "dotnetdhmirror-f8bzbjakh8cga6ab.azurecr.io/library/redis:7.4"; + private static readonly string _dockerContainerName = "redisTestContainer74"; private static readonly string _dockerMonitorContainerName = _dockerContainerName + "Monitor"; private static readonly Lazy _instance = new Lazy(Create); @@ -112,7 +113,7 @@ void Run() // use static name 'redisTestContainer' so if the container doesn't get removed we don't keep adding more // use redis base docker image // 30 second timeout to allow redis image to be downloaded, should be a rare occurrence, only happening when a new version is released - RunProcessAndThrowIfFailed(_path, $"run --rm -p 6379:6379 --name {_dockerContainerName} -d redis", "redis", logger, TimeSpan.FromMinutes(1)); + RunProcessAndThrowIfFailed(_path, $"run --rm -p 6379:6379 --name {_dockerContainerName} -d {_redisImageName}", "redis", logger, TimeSpan.FromMinutes(1)); } } diff --git a/src/SignalR/server/StackExchangeRedis/test/RedisEndToEnd.cs b/src/SignalR/server/StackExchangeRedis/test/RedisEndToEnd.cs index 63445379ac35..f3501fb69c1e 100644 --- a/src/SignalR/server/StackExchangeRedis/test/RedisEndToEnd.cs +++ b/src/SignalR/server/StackExchangeRedis/test/RedisEndToEnd.cs @@ -1,17 +1,15 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. -using System; -using System.Collections.Generic; -using System.Threading.Tasks; +using System.Net.WebSockets; using Microsoft.AspNetCore.Http.Connections; +using Microsoft.AspNetCore.Http.Connections.Client; +using Microsoft.AspNetCore.InternalTesting; using Microsoft.AspNetCore.SignalR.Client; using Microsoft.AspNetCore.SignalR.Protocol; using Microsoft.AspNetCore.SignalR.Tests; -using Microsoft.AspNetCore.InternalTesting; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; -using Xunit; namespace Microsoft.AspNetCore.SignalR.StackExchangeRedis.Tests; @@ -37,6 +35,7 @@ public RedisEndToEndTests(RedisServerFixture serverFixture) [ConditionalTheory] [SkipIfDockerNotPresent] [MemberData(nameof(TransportTypesAndProtocolTypes))] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/62435")] public async Task HubConnectionCanSendAndReceiveMessages(HttpTransportType transportType, string protocolName) { using (StartVerifiableLog()) @@ -57,6 +56,7 @@ public async Task HubConnectionCanSendAndReceiveMessages(HttpTransportType trans [ConditionalTheory] [SkipIfDockerNotPresent] [MemberData(nameof(TransportTypesAndProtocolTypes))] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/62435")] public async Task HubConnectionCanSendAndReceiveGroupMessages(HttpTransportType transportType, string protocolName) { using (StartVerifiableLog()) @@ -118,6 +118,7 @@ public async Task CanSendAndReceiveUserMessagesFromMultipleConnectionsWithSameUs [ConditionalTheory] [SkipIfDockerNotPresent] [MemberData(nameof(TransportTypesAndProtocolTypes))] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/62435")] public async Task CanSendAndReceiveUserMessagesWhenOneConnectionWithUserDisconnects(HttpTransportType transportType, string protocolName) { // Regression test: @@ -147,6 +148,7 @@ public async Task CanSendAndReceiveUserMessagesWhenOneConnectionWithUserDisconne [ConditionalTheory] [SkipIfDockerNotPresent] 
[MemberData(nameof(TransportTypesAndProtocolTypes))] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/62435")] public async Task HubConnectionCanSendAndReceiveGroupMessagesGroupNameWithPatternIsTreatedAsLiteral(HttpTransportType transportType, string protocolName) { using (StartVerifiableLog()) @@ -211,7 +213,106 @@ public async Task CanSendAndReceiveUserMessagesUserNameWithPatternIsTreatedAsLit } } - private static HubConnection CreateConnection(string url, HttpTransportType transportType, IHubProtocol protocol, ILoggerFactory loggerFactory, string userName = null) + [ConditionalTheory] + [SkipIfDockerNotPresent] + [InlineData("messagepack")] + [InlineData("json")] + [QuarantinedTest("https://github.com/dotnet/aspnetcore/issues/62435")] + public async Task StatefulReconnectPreservesMessageFromOtherServer(string protocolName) + { + using (StartVerifiableLog()) + { + var protocol = HubProtocolHelpers.GetHubProtocol(protocolName); + + ClientWebSocket innerWs = null; + WebSocketWrapper ws = null; + TaskCompletionSource reconnectTcs = null; + TaskCompletionSource startedReconnectTcs = null; + + var connection = CreateConnection(_serverFixture.FirstServer.Url + "/stateful", HttpTransportType.WebSockets, protocol, LoggerFactory, + customizeConnection: builder => + { + builder.WithStatefulReconnect(); + builder.Services.Configure(o => + { + // Replace the websocket creation for the first connection so we can make the client think there was an ungraceful closure + // Which will trigger the stateful reconnect flow + o.WebSocketFactory = async (context, token) => + { + if (reconnectTcs is null) + { + reconnectTcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + startedReconnectTcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + } + else + { + startedReconnectTcs.SetResult(); + // We only want to wait on the reconnect, not the initial connection attempt + await reconnectTcs.Task.DefaultTimeout(); + } + + innerWs = new ClientWebSocket(); + ws = new WebSocketWrapper(innerWs); + await innerWs.ConnectAsync(context.Uri, token); + + _ = Task.Run(async () => + { + try + { + while (innerWs.State == WebSocketState.Open) + { + var buffer = new byte[1024]; + var res = await innerWs.ReceiveAsync(buffer, default); + ws.SetReceiveResult((res, buffer.AsMemory(0, res.Count))); + } + } + // Log but ignore receive errors, that likely just means the connection closed + catch (Exception ex) + { + Logger.LogInformation(ex, "Error while reading from inner websocket"); + } + }); + + return ws; + }; + }); + }); + var secondConnection = CreateConnection(_serverFixture.SecondServer.Url + "/stateful", HttpTransportType.WebSockets, protocol, LoggerFactory); + + var tcs = new TaskCompletionSource(); + connection.On("SendToAll", message => tcs.TrySetResult(message)); + + var tcs2 = new TaskCompletionSource(); + secondConnection.On("SendToAll", message => tcs2.TrySetResult(message)); + + await connection.StartAsync().DefaultTimeout(); + await secondConnection.StartAsync().DefaultTimeout(); + + // Close first connection before the second connection sends a message to all clients + await ws.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, statusDescription: null, default); + await startedReconnectTcs.Task.DefaultTimeout(); + + // Send to all clients, since both clients are on different servers this means the backplane will be used + // And we want to test that messages are still preserved for stateful reconnect 
purposes when a client disconnects + // But is on a different server from the original message sender. + await secondConnection.SendAsync("SendToAll", "test message").DefaultTimeout(); + + // Check that second connection still receives the message + Assert.Equal("test message", await tcs2.Task.DefaultTimeout()); + Assert.False(tcs.Task.IsCompleted); + + // allow first connection to reconnect + reconnectTcs.SetResult(); + + // Check that first connection received the message once it reconnected + Assert.Equal("test message", await tcs.Task.DefaultTimeout()); + + await connection.DisposeAsync().DefaultTimeout(); + } + } + + private static HubConnection CreateConnection(string url, HttpTransportType transportType, IHubProtocol protocol, ILoggerFactory loggerFactory, string userName = null, + Action customizeConnection = null) { var hubConnectionBuilder = new HubConnectionBuilder() .WithLoggerFactory(loggerFactory) @@ -225,6 +326,8 @@ private static HubConnection CreateConnection(string url, HttpTransportType tran hubConnectionBuilder.Services.AddSingleton(protocol); + customizeConnection?.Invoke(hubConnectionBuilder); + return hubConnectionBuilder.Build(); } @@ -253,4 +356,67 @@ public static IEnumerable TransportTypesAndProtocolTypes } } } + + internal sealed class WebSocketWrapper : WebSocket + { + private readonly WebSocket _inner; + private TaskCompletionSource<(WebSocketReceiveResult, ReadOnlyMemory)> _receiveTcs = new(TaskCreationOptions.RunContinuationsAsynchronously); + + public WebSocketWrapper(WebSocket inner) + { + _inner = inner; + } + + public override WebSocketCloseStatus? CloseStatus => _inner.CloseStatus; + + public override string CloseStatusDescription => _inner.CloseStatusDescription; + + public override WebSocketState State => _inner.State; + + public override string SubProtocol => _inner.SubProtocol; + + public override void Abort() + { + _inner.Abort(); + } + + public override Task CloseAsync(WebSocketCloseStatus closeStatus, string statusDescription, CancellationToken cancellationToken) + { + return _inner.CloseAsync(closeStatus, statusDescription, cancellationToken); + } + + public override Task CloseOutputAsync(WebSocketCloseStatus closeStatus, string statusDescription, CancellationToken cancellationToken) + { + _receiveTcs.TrySetException(new IOException("force reconnect")); + return Task.CompletedTask; + } + + public override void Dispose() + { + _inner.Dispose(); + } + + public void SetReceiveResult((WebSocketReceiveResult, ReadOnlyMemory) result) + { + _receiveTcs.SetResult(result); + } + + public override async Task ReceiveAsync(ArraySegment buffer, CancellationToken cancellationToken) + { + var res = await _receiveTcs.Task; + // Handle zero-byte reads + if (buffer.Count == 0) + { + return res.Item1; + } + _receiveTcs = new(TaskCreationOptions.RunContinuationsAsynchronously); + res.Item2.CopyTo(buffer); + return res.Item1; + } + + public override Task SendAsync(ArraySegment buffer, WebSocketMessageType messageType, bool endOfMessage, CancellationToken cancellationToken) + { + return _inner.SendAsync(buffer, messageType, endOfMessage, cancellationToken); + } + } } diff --git a/src/SignalR/server/StackExchangeRedis/test/Startup.cs b/src/SignalR/server/StackExchangeRedis/test/Startup.cs index 3fd461aed98e..1b55bd1cff53 100644 --- a/src/SignalR/server/StackExchangeRedis/test/Startup.cs +++ b/src/SignalR/server/StackExchangeRedis/test/Startup.cs @@ -33,6 +33,7 @@ public void Configure(IApplicationBuilder app) app.UseEndpoints(endpoints => { endpoints.MapHub("/echo"); 
+ endpoints.MapHub("/stateful", o => o.AllowStatefulReconnects = true); }); } diff --git a/src/SignalR/server/StackExchangeRedis/test/StatefulHub.cs b/src/SignalR/server/StackExchangeRedis/test/StatefulHub.cs new file mode 100644 index 000000000000..1efa1d84fcd0 --- /dev/null +++ b/src/SignalR/server/StackExchangeRedis/test/StatefulHub.cs @@ -0,0 +1,12 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +namespace Microsoft.AspNetCore.SignalR.StackExchangeRedis.Tests; + +public class StatefulHub : Hub +{ + public Task SendToAll(string message) + { + return Clients.All.SendAsync("SendToAll", message); + } +} diff --git a/src/StaticAssets/src/StaticAssetsInvoker.cs b/src/StaticAssets/src/StaticAssetsInvoker.cs index 1d21cc2929ec..c7555c901b93 100644 --- a/src/StaticAssets/src/StaticAssetsInvoker.cs +++ b/src/StaticAssets/src/StaticAssetsInvoker.cs @@ -223,7 +223,7 @@ private async Task SendRangeAsync(StaticAssetInvocationContext requestContext, R if (requestContext.Response.StatusCode == StatusCodes.Status200OK) { - requestContext.Response.StatusCode = StatusCodes.Status416RangeNotSatisfiable; + requestContext.Response.StatusCode = StatusCodes.Status206PartialContent; } await ApplyResponseHeadersAsync(requestContext, StatusCodes.Status206PartialContent); diff --git a/src/StaticAssets/test/StaticAssetsIntegrationTests.cs b/src/StaticAssets/test/StaticAssetsIntegrationTests.cs index 541a468a15a2..fda161ea055a 100644 --- a/src/StaticAssets/test/StaticAssetsIntegrationTests.cs +++ b/src/StaticAssets/test/StaticAssetsIntegrationTests.cs @@ -989,6 +989,33 @@ public async Task IfUnmodifiedSinceDateLessThanLastModifiedShouldReturn412(HttpM Assert.Equal(HttpStatusCode.PreconditionFailed, res2.StatusCode); } + // 14.35.2 Range Retrieval Requests + // The presence of a Range header in an unconditional GET modifies + // what is returned if the GET is otherwise successful. In other + // words, the response carries a status code of 206 (Partial + // Content) instead of 200 (OK). 
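// Related to the stateful reconnect plumbing above (StatefulHub and the "/stateful" endpoint):
// a minimal sketch, assuming the standard SignalR server and client packages, of how the
// feature is enabled on both sides. The server opts in per endpoint and the client opts in on
// the connection builder; the hub, route, and URL below are illustrative, not the repo's test
// fixtures.
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.SignalR;
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.Extensions.DependencyInjection;

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddSignalR();
var app = builder.Build();

// Server side: allow stateful reconnects for this endpoint.
app.MapHub<EchoHub>("/echo", o => o.AllowStatefulReconnects = true);
await app.StartAsync();

// Client side: opt in on the builder so unacked messages are replayed after an ungraceful drop.
var connection = new HubConnectionBuilder()
    .WithUrl("http://localhost:5000/echo")
    .WithStatefulReconnect()
    .Build();
await connection.StartAsync();

public class EchoHub : Hub
{
    public Task Send(string message) => Clients.All.SendAsync("Send", message);
}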
+ [Fact] + public async Task RangeGivesMatchingRange() + { + var client = await CreateClient(); + + var req1 = new HttpRequestMessage(HttpMethod.Get, "http://localhost/sample.txt"); + req1.Headers.Range = new RangeHeaderValue(0, 4); + var res1 = await client.SendAsync(req1); + + var req2 = new HttpRequestMessage(HttpMethod.Get, "http://localhost/sample.txt"); + req2.Headers.Range = new RangeHeaderValue(7, 11); + var res2 = await client.SendAsync(req2); + + Assert.Equal(HttpStatusCode.PartialContent, res1.StatusCode); + Assert.Equal("Hello", await res1.Content.ReadAsStringAsync()); + Assert.Equal(5, res1.Content.Headers.ContentLength); + + Assert.Equal(HttpStatusCode.PartialContent, res2.StatusCode); + Assert.Equal("World", await res2.Content.ReadAsStringAsync()); + Assert.Equal(5, res2.Content.Headers.ContentLength); + } + public static IEnumerable SupportedMethods => new[] { new [] { HttpMethod.Get }, diff --git a/src/Testing/src/xunit/SkipOnHelixAttribute.cs b/src/Testing/src/xunit/SkipOnHelixAttribute.cs index 38b376cb6808..2c7b66f75a67 100644 --- a/src/Testing/src/xunit/SkipOnHelixAttribute.cs +++ b/src/Testing/src/xunit/SkipOnHelixAttribute.cs @@ -66,7 +66,10 @@ private bool ShouldSkip() return true; } - return Queues.ToLowerInvariant().Split(';').Contains(targetQueue); + // We have "QueueName" and "QueueName.Open" queues for internal and public builds + // If we want to skip the test in the public queue, we want to skip it in the internal queue, and vice versa + return Queues.ToLowerInvariant().Split(';').Any(q => q.Equals(targetQueue, StringComparison.Ordinal) || q.StartsWith(targetQueue, StringComparison.Ordinal) || + targetQueue.StartsWith(q, StringComparison.Ordinal)); } public static bool OnHelix() => HelixHelper.OnHelix(); diff --git a/src/Tools/Tools.slnf b/src/Tools/Tools.slnf index 484313af8712..38dbc4a65ae9 100644 --- a/src/Tools/Tools.slnf +++ b/src/Tools/Tools.slnf @@ -29,6 +29,7 @@ "src\\Hosting\\Abstractions\\src\\Microsoft.AspNetCore.Hosting.Abstractions.csproj", "src\\Hosting\\Hosting\\src\\Microsoft.AspNetCore.Hosting.csproj", "src\\Hosting\\Server.Abstractions\\src\\Microsoft.AspNetCore.Hosting.Server.Abstractions.csproj", + "src\\Hosting\\TestHost\\src\\Microsoft.AspNetCore.TestHost.csproj", "src\\Html.Abstractions\\src\\Microsoft.AspNetCore.Html.Abstractions.csproj", "src\\Http\\Authentication.Abstractions\\src\\Microsoft.AspNetCore.Authentication.Abstractions.csproj", "src\\Http\\Authentication.Core\\src\\Microsoft.AspNetCore.Authentication.Core.csproj", @@ -109,9 +110,9 @@ "src\\Tools\\Extensions.ApiDescription.Server\\src\\Microsoft.Extensions.ApiDescription.Server.csproj", "src\\Tools\\FirstRunCertGenerator\\src\\Microsoft.AspNetCore.DeveloperCertificates.XPlat.csproj", "src\\Tools\\FirstRunCertGenerator\\test\\Microsoft.AspNetCore.DeveloperCertificates.XPlat.Tests.csproj", + "src\\Tools\\GetDocumentInsider\\sample\\GetDocumentSample.csproj", "src\\Tools\\GetDocumentInsider\\src\\GetDocument.Insider.csproj", "src\\Tools\\GetDocumentInsider\\tests\\GetDocumentInsider.Tests.csproj", - "src\\Tools\\GetDocumentInsider\\sample\\GetDocumentSample.csproj", "src\\Tools\\LinkabilityChecker\\LinkabilityChecker.csproj", "src\\Tools\\Microsoft.dotnet-openapi\\src\\Microsoft.dotnet-openapi.csproj", "src\\Tools\\Microsoft.dotnet-openapi\\test\\dotnet-microsoft.openapi.Tests.csproj", @@ -125,4 +126,4 @@ "src\\WebEncoders\\src\\Microsoft.Extensions.WebEncoders.csproj" ] } -} +} \ No newline at end of file diff --git a/src/Tools/dotnet-user-jwts/test/UserJwtsTests.cs 
b/src/Tools/dotnet-user-jwts/test/UserJwtsTests.cs index 71cc2cdb7d10..801e414fe95e 100644 --- a/src/Tools/dotnet-user-jwts/test/UserJwtsTests.cs +++ b/src/Tools/dotnet-user-jwts/test/UserJwtsTests.cs @@ -1,14 +1,19 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.IdentityModel.Tokens.Jwt; +using System.Security.Claims; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.RegularExpressions; +using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.InternalTesting; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration.UserSecrets; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Tools.Internal; using Xunit.Abstractions; -using System.Text.RegularExpressions; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.IdentityModel.Tokens.Jwt; namespace Microsoft.AspNetCore.Authentication.JwtBearer.Tools.Tests; @@ -62,6 +67,38 @@ public void Create_WritesGeneratedTokenToDisk() Assert.Contains("dotnet-user-jwts", File.ReadAllText(appsettings)); } + [Fact] + public async Task Create_TokenAcceptedByJwtBearerHandler() + { + var project = Path.Combine(fixture.CreateProject(), "TestProject.csproj"); + var appsettings = Path.Combine(Path.GetDirectoryName(project), "appsettings.Development.json"); + var secrets = PathHelper.GetSecretsPathFromSecretsId(fixture.TestSecretsId); + var app = new Program(_console); + + app.Run(["create", "--project", project, "-o", "token"]); + var token = _console.GetOutput().Trim(); + + var builder = WebApplication.CreateEmptyBuilder(new()); + builder.WebHost.UseTestServer(); + + builder.Configuration.AddJsonFile(appsettings); + builder.Configuration.AddJsonFile(secrets); + + builder.Services.AddRouting(); + builder.Services.AddAuthentication().AddJwtBearer(); + builder.Services.AddAuthorization(); + + using var webApp = builder.Build(); + webApp.MapGet("/secret", (ClaimsPrincipal user) => $"Hello {user.Identity?.Name}!") + .RequireAuthorization(); + + await webApp.StartAsync(); + + var client = webApp.GetTestClient(); + client.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); + Assert.Equal($"Hello {Environment.UserName}!", await client.GetStringAsync("/secret")); + } + [Fact] public void Create_CanModifyExistingScheme() { diff --git a/src/Tools/dotnet-user-jwts/test/dotnet-user-jwts.Tests.csproj b/src/Tools/dotnet-user-jwts/test/dotnet-user-jwts.Tests.csproj index 5ad17868a98a..b362c30d0611 100644 --- a/src/Tools/dotnet-user-jwts/test/dotnet-user-jwts.Tests.csproj +++ b/src/Tools/dotnet-user-jwts/test/dotnet-user-jwts.Tests.csproj @@ -14,4 +14,10 @@ + + + + + + diff --git a/src/submodules/googletest b/src/submodules/googletest index d14403194054..eb2d85edd0bf 160000 --- a/src/submodules/googletest +++ b/src/submodules/googletest @@ -1 +1 @@ -Subproject commit d144031940543e15423a25ae5a8a74141044862f +Subproject commit eb2d85edd0bff7a712b6aff147cd9f789f0d7d0b
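// A minimal sketch of the queue matching adopted in SkipOnHelixAttribute above, assuming
// lower-cased, semicolon-separated queue lists (names are illustrative): internal queues and
// their ".Open" public variants should skip together, so the comparison accepts a prefix match
// in either direction instead of an exact match only.
using System;
using System.Linq;

static class HelixQueueSkipSketch
{
    public static bool ShouldSkip(string skipQueues, string targetQueue)
    {
        var target = targetQueue.ToLowerInvariant();
        return skipQueues.ToLowerInvariant().Split(';').Any(q =>
            q.Equals(target, StringComparison.Ordinal) ||
            q.StartsWith(target, StringComparison.Ordinal) ||
            target.StartsWith(q, StringComparison.Ordinal));
    }
}

// e.g. ShouldSkip("osx.15.amd64.open", "osx.15.amd64") and
//      ShouldSkip("osx.15.amd64", "osx.15.amd64.open") both return true.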