diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 569597b6a..63d5b270f 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -16,6 +16,7 @@ body:
- disk-buffering
- gcp-auth-extension
- gcp-resources
+ - ibm-mq-metrics
- jfr-connection
- jfr-events
- jmx-metrics
@@ -57,3 +58,10 @@ body:
attributes:
label: Additional context
description: Any additional information you think may be relevant to this issue.
+ - type: dropdown
+ attributes:
+ label: Tip
+ description: This element is static, used to render a helpful sub-heading for end-users and community members to help prioritize issues. Please leave as is.
+ options:
+ - [React](https://github.blog/news-insights/product-news/add-reactions-to-pull-requests-issues-and-comments/) with 👍 to help prioritize this issue. Please use comments to provide useful context, avoiding `+1` or `me too`, to help us triage it. Learn more [here](https://opentelemetry.io/community/end-user/issue-participation/).
+ default: 0
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index 59450b16f..45edd560b 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -16,6 +16,7 @@ body:
- disk-buffering
- gcp-auth-extension
- gcp-resources
+ - ibm-mq-metrics
- jfr-connection
- jfr-events
- jmx-metrics
@@ -48,3 +49,10 @@ body:
attributes:
label: Additional context
description: Add any other context or screenshots about the feature request here.
+ - type: dropdown
+ attributes:
+ label: Tip
+ description: This element is static, used to render a helpful sub-heading for end-users and community members to help prioritize issues. Please leave as is.
+ options:
+ - [React](https://github.blog/news-insights/product-news/add-reactions-to-pull-requests-issues-and-comments/) with 👍 to help prioritize this issue. Please use comments to provide useful context, avoiding `+1` or `me too`, to help us triage it. Learn more [here](https://opentelemetry.io/community/end-user/issue-participation/).
+ default: 0
diff --git a/.github/component_owners.yml b/.github/component_owners.yml
index de4544864..6b74234ad 100644
--- a/.github/component_owners.yml
+++ b/.github/component_owners.yml
@@ -1,14 +1,14 @@
-# this file is used by .github/workflows/assign-reviewers.yml
+# this file is used by .github/workflows/assign-reviewers.yml and .github/workflows/assign-issue-owners.yml
#
# NOTE component owners must be members of the GitHub OpenTelemetry organization
# so that they can be added to @open-telemetry/java-contrib-triagers
-# which in turn is required for them to be auto-assigned as reviewers by the automation
+# which in turn is required for them to be auto-assigned as reviewers and issue assignees by the automation
#
# NOTE when updating this file, don't forget to update the README.md files in the associated
# components also
#
# NOTE when adding/updating one of the component names, don't forget to update the associated
-# `comp:*` labels
+# `component:*` labels (used for both PR reviews and issue assignment)
components:
aws-resources:
- wangzlei
@@ -65,7 +65,7 @@ components:
- LikeTheSalad
- breedx-splk
- jack-berg
- prometheus-collector:
+ prometheus-client-bridge:
- jkwatson
resource-providers:
- breedx-splk
@@ -89,3 +89,7 @@ components:
- sylvainjuge
opamp-client:
- LikeTheSalad
+ - jackshirazi
+ ibm-mq-metrics:
+ - breedx-splk
+ - atoulme
diff --git a/.github/config/lychee.toml b/.github/config/lychee.toml
new file mode 100644
index 000000000..071c66fd8
--- /dev/null
+++ b/.github/config/lychee.toml
@@ -0,0 +1,17 @@
+timeout = 30
+retry_wait_time = 5
+max_retries = 6
+max_concurrency = 4
+
+# Check link anchors
+include_fragments = true
+
+remap = [
+ # workaround for https://github.com/lycheeverse/lychee/issues/1729
+ "https://github.com/(.*?)/(.*?)/blob/(.*?)/(.*#.*)$ https://raw.githubusercontent.com/$1/$2/$3/$4"
+]
+
+exclude = [
+ # excluding links to pull requests and issues is done for performance
+ "^https://github.com/open-telemetry/opentelemetry-java-contrib/(issues|pull)/\\d+$",
+]
diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md
new file mode 100644
index 000000000..d7f693363
--- /dev/null
+++ b/.github/copilot-instructions.md
@@ -0,0 +1,66 @@
+# Copilot Instructions for OpenTelemetry Java Contrib
+
+This repository provides observability instrumentation for Java applications.
+
+## Code Review Priorities
+
+### Style Guide Compliance
+
+**PRIORITY**: Verify that all code changes follow the [Style Guide](../docs/style-guide.md). Check:
+
+- Code formatting (auto-formatting, static imports, class organization)
+- Java language conventions (`final` usage, `@Nullable` annotations, `Optional` usage)
+- Performance constraints (hot path allocations)
+- Implementation patterns (SPI registration, configuration conventions)
+- Gradle conventions (Kotlin DSL, plugin usage, module naming)
+- Documentation standards (README files, deprecation processes)
+
+### Critical Areas
+
+- **Public APIs**: Changes affect downstream users and require careful review
+- **Performance**: Instrumentation must have minimal overhead
+- **Thread Safety**: Ensure safe concurrent access patterns
+- **Memory Management**: Prevent leaks and excessive allocations
+
+### Quality Standards
+
+- Proper error handling with appropriate logging levels
+- OpenTelemetry specification and semantic convention compliance
+- Resource cleanup and lifecycle management
+- Comprehensive unit tests for new functionality
+
+### Test suites
+
+This project uses Gradle 9, which requires `Test` tasks to specify their test classes and classpath explicitly.
+
+For example, this will NOT work because it registers a `Test` task without specifying the test classes or classpath:
+
+```kotlin
+tasks.register("IntegrationTestUserCreds") {
+ dependsOn(tasks.shadowJar)
+ dependsOn(tasks.named("copyAgent"))
+ ...
+}
+```
+
+This is fixed by specifying the test classes and classpath explicitly:
+
+```kotlin
+tasks.register("IntegrationTestUserCreds") {
+ testClassesDirs = sourceSets.test.get().output.classesDirs
+ classpath = sourceSets.test.get().runtimeClasspath
+
+ dependsOn(tasks.shadowJar)
+ dependsOn(tasks.named("copyAgent"))
+ ...
+}
+```
+
+## Coding Agent Instructions
+
+When implementing changes or new features:
+
+1. Follow all [Style Guide](../docs/style-guide.md) conventions and the Code Review Priorities above
+2. Run tests to ensure they still pass (use `./gradlew test` and `./gradlew integrationTest` as needed)
+3. **Always run `./gradlew spotlessApply`** after making code changes to ensure proper formatting
+4. Run markdown lint to ensure it still passes: `npx markdownlint-cli@0.45.0 -c .github/config/markdownlint.yml **/*.md`
diff --git a/.github/renovate.json5 b/.github/renovate.json5
index 0efdf7d3a..a7d6e7672 100644
--- a/.github/renovate.json5
+++ b/.github/renovate.json5
@@ -1,25 +1,43 @@
{
$schema: 'https://docs.renovatebot.com/renovate-schema.json',
extends: [
- 'config:recommended',
- 'docker:pinDigests',
- 'helpers:pinGitHubActionDigests',
+ 'config:best-practices',
+ 'helpers:pinGitHubActionDigestsToSemver',
],
ignorePresets: [
':ignoreModulesAndTests', // needed to keep maven-extension test pom files up-to-date
+ 'workarounds:javaLTSVersions', // Allow all Java major versions, not just LTS
+ ],
+ prHourlyLimit: 5, // we have a large number of parallel runners
+ labels: [
+ 'dependencies',
],
- prHourlyLimit: 5,
packageRules: [
{
- // this is to reduce the number of renovate PRs
- matchManagers: [
- 'github-actions',
- 'dockerfile',
+ // reduces the number of Renovate PRs
+ // (patch updates are typically non-breaking)
+ groupName: 'all patch versions',
+ matchUpdateTypes: [
+ 'patch',
+ ],
+ schedule: [
+ 'before 8am every weekday',
],
- extends: [
- 'schedule:weekly',
+ },
+ {
+ // prevents these Renovate PRs from trickling in throughout the week
+ // (consolidating the review process)
+ matchUpdateTypes: [
+ 'minor',
+ 'major',
+ ],
+ schedule: [
+ 'before 8am on Monday',
+ ],
+ matchPackageNames: [
+ '!io.opentelemetry:**',
+ '!io.opentelemetry.*:**',
],
- groupName: 'weekly update',
},
{
matchPackageNames: [
@@ -50,6 +68,17 @@
],
enabled: false,
},
+ {
+ // junit 6+ requires Java 17+
+ matchPackageNames: [
+ 'org.junit:**',
+ 'org.junit.jupiter:**',
+ ],
+ matchUpdateTypes: [
+ 'major',
+ ],
+ enabled: false,
+ },
{
// junit-pioneer 2+ requires Java 11+
matchPackageNames: [
@@ -67,7 +96,7 @@
],
enabled: false,
matchPackageNames: [
- 'org.mockito:{/,}**',
+ 'org.mockito:**',
],
},
{
@@ -105,25 +134,45 @@
matchCurrentVersion: '3.5.0',
enabled: false,
matchPackageNames: [
- 'org.apache.maven:{/,}**',
+ 'org.apache.maven:**',
],
},
{
groupName: 'spotless packages',
matchPackageNames: [
- 'com.diffplug.spotless{/,}**',
+ 'com.diffplug.spotless',
+ 'com.diffplug.spotless:**',
],
},
{
groupName: 'hipparchus packages',
matchPackageNames: [
- 'org.hipparchus{/,}**',
+ 'org.hipparchus:**',
],
},
{
groupName: 'errorprone packages',
matchPackageNames: [
- 'com.google.errorprone{/,}**',
+ 'com.google.errorprone:**',
+ ],
+ },
+ {
+ groupName: 'jackson packages',
+ matchPackageNames: [
+ 'com.fasterxml.jackson:**',
+ 'com.fasterxml.jackson.core:**',
+ ],
+ },
+ {
+ groupName: 'develocity packages',
+ matchPackageNames: [
+ 'com.gradle.develocity:**',
+ ],
+ },
+ {
+ groupName: 'bouncycastle packages',
+ matchPackageNames: [
+ 'org.bouncycastle:**',
],
},
{
@@ -133,7 +182,7 @@
],
enabled: false,
matchPackageNames: [
- 'org.openjdk.jmc{/,}**',
+ 'org.openjdk.jmc:**',
],
},
{
@@ -144,7 +193,7 @@
matchCurrentVersion: '5.0.0',
enabled: false,
matchPackageNames: [
- 'jakarta.servlet:{/,}**',
+ 'jakarta.servlet:**',
],
},
{
@@ -157,7 +206,7 @@
],
enabled: false,
matchPackageNames: [
- 'org.springframework.boot{/,}**',
+ 'org.springframework.boot:**',
],
},
],
@@ -165,12 +214,34 @@
{
customType: 'regex',
datasourceTemplate: 'npm',
- fileMatch: [
- '^.github/workflows/',
+ managerFilePatterns: [
+ '.github/workflows/**',
],
matchStrings: [
'npx (?<depName>[^@]+)@(?<currentValue>[^\\s]+)',
],
},
+ {
+ customType: 'regex',
+ datasourceTemplate: 'java-version',
+ managerFilePatterns: [
+ '.github/workflows/**',
+ ],
+ matchStrings: [
+ '(?<currentValue>\\d+) # renovate: datasource=java-version',
+ ],
+ depNameTemplate: 'java',
+ extractVersionTemplate: '^(?<version>\\d+)',
+ },
+ {
+ customType: 'regex',
+ datasourceTemplate: 'github-releases',
+ managerFilePatterns: [
+ '**/build.gradle.kts',
+ ],
+ matchStrings: [
+ '"https://github.com/(?[^/]+/[^/]+)/zipball/(?.+?)"',
+ ],
+ },
],
}
diff --git a/.github/repository-settings.md b/.github/repository-settings.md
index 0e3d9240c..22299364d 100644
--- a/.github/repository-settings.md
+++ b/.github/repository-settings.md
@@ -1,24 +1,12 @@
# Repository settings
-Same
-as [opentelemetry-java-instrumentation repository settings](https://github.com/open-telemetry/opentelemetry-java-instrumentation/blob/main/.github/repository-settings.md#repository-settings),
-except for
+This document describes any changes that have been made to the
+settings in this repository outside the settings tracked in the
+private admin repo.
-- The rules for `gh-pages` and `cloudfoundry` branches are not relevant in this repository.
+## Merge queue for `main`
-and the enablement of merge queues below.
-
-## Merge queue
-
-Needs to be enabled using classic branch protection (instead of rule set)
-because of our use of the classic branch protection "Restrict who can push to matching branches"
-which otherwise will block the merge queue from merging to main.
-
-### Restrict branch creation
-
-- Additional exclusion for `gh-readonly-queue/main/pr-*`
-
-### Classic branch protection for `main`
+[The admin repo doesn't currently support tracking merge queue settings.]
- Require merge queue: CHECKED
- Build concurrency: 5
diff --git a/.github/scripts/draft-change-log-entries.sh b/.github/scripts/draft-change-log-entries.sh
index 845c92672..489c1fe4d 100755
--- a/.github/scripts/draft-change-log-entries.sh
+++ b/.github/scripts/draft-change-log-entries.sh
@@ -35,6 +35,7 @@ component_names["consistent-sampling/"]="Consistent sampling"
component_names["disk-buffering/"]="Disk buffering"
component_names["gcp-resources/"]="GCP resources"
component_names["gcp-auth-extension/"]="GCP authentication extension"
+component_names["ibm-mq-metrics/"]="IBM MQ metrics"
component_names["inferred-spans/"]="Inferred spans"
component_names["jfr-connection/"]="JFR connection"
component_names["jfr-events/"]="JFR events"
@@ -44,6 +45,7 @@ component_names["kafka-exporter/"]="Kafka exporter"
component_names["maven-extension/"]="Maven extension"
component_names["micrometer-meter-provider/"]="Micrometer MeterProvider"
component_names["noop-api/"]="No-op API"
+component_names["opamp-client/"]="OpAMP client"
component_names["processors/"]="Telemetry processors"
component_names["prometheus-client-bridge/"]="Prometheus client bridge"
component_names["runtime-attach/"]="Runtime attach"
diff --git a/.github/scripts/package-lock.json b/.github/scripts/package-lock.json
new file mode 100644
index 000000000..f364e1106
--- /dev/null
+++ b/.github/scripts/package-lock.json
@@ -0,0 +1,27 @@
+{
+ "name": "github-scripts",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "github-scripts",
+ "version": "1.0.0",
+ "dependencies": {
+ "yaml": "2.8.1"
+ }
+ },
+ "node_modules/yaml": {
+ "version": "2.8.1",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz",
+ "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==",
+ "license": "ISC",
+ "bin": {
+ "yaml": "bin.mjs"
+ },
+ "engines": {
+ "node": ">= 14.6"
+ }
+ }
+ }
+}
diff --git a/.github/scripts/package.json b/.github/scripts/package.json
new file mode 100644
index 000000000..efca3b345
--- /dev/null
+++ b/.github/scripts/package.json
@@ -0,0 +1,9 @@
+{
+ "//": "Dependencies for GitHub Actions workflows that use actions/github-script",
+ "name": "github-scripts",
+ "version": "1.0.0",
+ "private": true,
+ "dependencies": {
+ "yaml": "2.8.1"
+ }
+}
diff --git a/.github/scripts/update-version.sh b/.github/scripts/update-version.sh
index 2ec064776..39873d0f1 100755
--- a/.github/scripts/update-version.sh
+++ b/.github/scripts/update-version.sh
@@ -10,3 +10,5 @@ fi
sed -Ei "s/val stableVersion = \"[^\"]*\"/val stableVersion = \"$version\"/" version.gradle.kts
sed -Ei "s/val alphaVersion = \"[^\"]*\"/val alphaVersion = \"$alpha_version\"/" version.gradle.kts
+
+sed -Ei "1 s/(Comparing source compatibility of [a-z-]+)-[0-9]+\.[0-9]+\.[0-9]+(-SNAPSHOT)?.jar/\1-$version.jar/" docs/apidiffs/current_vs_latest/*.txt
diff --git a/.github/workflows/assign-issue-owners.yml b/.github/workflows/assign-issue-owners.yml
new file mode 100644
index 000000000..7d7acba67
--- /dev/null
+++ b/.github/workflows/assign-issue-owners.yml
@@ -0,0 +1,78 @@
+---
+name: Assign issue owners
+
+on:
+ issues:
+ types: [labeled]
+
+permissions:
+ contents: read
+
+jobs:
+ assign-owners:
+ permissions:
+ contents: read
+ issues: write
+ runs-on: ubuntu-latest
+ if: startsWith(github.event.label.name, 'component:')
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - name: Install yaml dependency used below
+ run: npm install .github/scripts
+
+ - name: Parse component label and assign owners
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ with:
+ script: |
+ const fs = require('fs');
+ const { parse } = require('yaml');
+
+ // Extract component name from label
+ const labelName = context.payload.label.name;
+
+ if (!labelName.startsWith('component:')) {
+ core.setFailed('Label does not match expected pattern');
+ return;
+ }
+
+ const componentName = labelName.replace('component:', '');
+ console.log(`Processing component: ${componentName}`);
+
+ // Read and parse component_owners.yml
+ const yamlContent = fs.readFileSync('.github/component_owners.yml', 'utf8');
+ const data = parse(yamlContent);
+
+ if (!data || !data.components) {
+ core.setFailed('Invalid component_owners.yml structure');
+ return;
+ }
+
+ const components = data.components;
+
+ if (!(componentName in components)) {
+ core.setFailed(`Component '${componentName}' not found in component_owners.yml`);
+ return;
+ }
+
+ const owners = components[componentName];
+
+ if (!owners || owners.length === 0) {
+ core.setFailed(`No owners found for component '${componentName}'`);
+ return;
+ }
+
+ console.log(`Found owners: ${owners.join(', ')}`);
+
+ // Assign the issue to the owners
+ const issueNumber = context.payload.issue.number;
+
+ await github.rest.issues.addAssignees({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: issueNumber,
+ assignees: owners
+ });
+
+ console.log(`Successfully assigned issue #${issueNumber} to ${owners.join(', ')}`);
diff --git a/.github/workflows/assign-reviewers.yml b/.github/workflows/assign-reviewers.yml
index 84a7a77af..d1cf86ce8 100644
--- a/.github/workflows/assign-reviewers.yml
+++ b/.github/workflows/assign-reviewers.yml
@@ -18,6 +18,7 @@ jobs:
pull-requests: write # for assigning reviewers
runs-on: ubuntu-latest
steps:
- - uses: open-telemetry/assign-reviewers-action@ab8aca8056f3b5af18282b54baa57a852c47abf8 # main
+ - uses: dyladan/component-owners@58bd86e9814d23f1525d0a970682cead459fa783 # v0.1.0
with:
config-file: .github/component_owners.yml
+ assign-owners: false
diff --git a/.github/workflows/auto-spotless-apply.yml b/.github/workflows/auto-spotless-apply.yml
new file mode 100644
index 000000000..b363b4977
--- /dev/null
+++ b/.github/workflows/auto-spotless-apply.yml
@@ -0,0 +1,94 @@
+name: Auto spotless apply
+on:
+ workflow_run:
+ workflows:
+ - "Auto spotless check"
+ types:
+ - completed
+
+permissions:
+ contents: read
+
+jobs:
+ apply:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ pull-requests: write
+ steps:
+ - name: Download patch
+ uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+ with:
+ run-id: ${{ github.event.workflow_run.id }}
+ path: ${{ runner.temp }}
+ merge-multiple: true
+ github-token: ${{ github.token }}
+
+ - id: unzip-patch
+ name: Unzip patch
+ working-directory: ${{ runner.temp }}
+ run: |
+ if [ -f patch ]; then
+ echo "exists=true" >> $GITHUB_OUTPUT
+ fi
+
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ if: steps.unzip-patch.outputs.exists == 'true'
+ id: otelbot-token
+ with:
+ app-id: 1296620
+ private-key: ${{ secrets.OTELBOT_JAVA_CONTRIB_PRIVATE_KEY }}
+
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ if: steps.unzip-patch.outputs.exists == 'true'
+ with:
+ repository: "${{ github.event.workflow_run.head_repository.full_name }}"
+ ref: "${{ github.event.workflow_run.head_branch }}"
+ token: ${{ steps.otelbot-token.outputs.token }}
+
+ - name: Use CLA approved github bot
+ if: steps.unzip-patch.outputs.exists == 'true'
+ # IMPORTANT do not call the .github/scripts/use-cla-approved-bot.sh
+ # since that script could have been compromised in the PR branch
+ run: |
+ git config user.name otelbot
git config user.email 197425009+otelbot@users.noreply.github.com
+
+ - name: Apply patch and push
+ if: steps.unzip-patch.outputs.exists == 'true'
+ run: |
+ git apply "${{ runner.temp }}/patch"
+ git commit -a -m "./gradlew spotlessApply"
+ git push
+
+ - id: get-pr
+ if: steps.unzip-patch.outputs.exists == 'true'
+ name: Get PR
+ env:
+ GH_REPO: ${{ github.repository }}
+ GH_TOKEN: ${{ github.token }}
+ PR_BRANCH: |-
+ ${{
+ (github.event.workflow_run.head_repository.owner.login != github.event.workflow_run.repository.owner.login)
+ && format('{0}:{1}', github.event.workflow_run.head_repository.owner.login, github.event.workflow_run.head_branch)
+ || github.event.workflow_run.head_branch
+ }}
+ run: |
+ number=$(gh pr view "$PR_BRANCH" --json number --jq .number)
+ echo "number=$number" >> $GITHUB_OUTPUT
+
+ - if: steps.unzip-patch.outputs.exists == 'true' && success()
+ env:
+ GH_REPO: ${{ github.repository }}
+ GH_TOKEN: ${{ steps.otelbot-token.outputs.token }}
+ PR_NUMBER: ${{ steps.get-pr.outputs.number }}
+ run: |
+ gh pr comment $PR_NUMBER --body "🔧 The result from spotlessApply was committed to the PR branch."
+
+ - if: steps.unzip-patch.outputs.exists == 'true' && failure()
+ env:
+ GH_REPO: ${{ github.repository }}
+ GH_TOKEN: ${{ steps.otelbot-token.outputs.token }}
+ PR_NUMBER: ${{ steps.get-pr.outputs.number }}
+ run: |
+ gh pr comment $PR_NUMBER --body "❌ The result from spotlessApply could not be committed to the PR branch, see logs: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID."
diff --git a/.github/workflows/auto-spotless-check.yml b/.github/workflows/auto-spotless-check.yml
new file mode 100644
index 000000000..e5ec0e2e4
--- /dev/null
+++ b/.github/workflows/auto-spotless-check.yml
@@ -0,0 +1,53 @@
+name: Auto spotless check
+on:
+ pull_request:
+ types:
+ - opened
+ - synchronize
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+ cancel-in-progress: true
+
+permissions:
+ contents: read
+
+jobs:
+ check:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - name: Set up JDK for running Gradle
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+ with:
+ cache-read-only: true
+
+ - name: Check out PR branch
+ env:
+ GH_TOKEN: ${{ github.token }}
+ run: gh pr checkout ${{ github.event.pull_request.number }}
+
+ - name: Spotless
+ run: ./gradlew spotlessApply
+
+ - id: create-patch
+ name: Create patch file
+ run: |
+ git diff > patch
+ if [ -s patch ]; then
+ echo "exists=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Upload patch file
+ if: steps.create-patch.outputs.exists == 'true'
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+ with:
+ path: patch
+ name: patch
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index e34c77489..a798378c6 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -12,7 +12,7 @@ permissions:
jobs:
backport:
permissions:
- contents: write # for Git to git push
+ contents: write # for git push to PR branch
runs-on: ubuntu-latest
steps:
- run: |
@@ -21,7 +21,7 @@ jobs:
exit 1
fi
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# history is needed to run git cherry-pick below
fetch-depth: 0
@@ -29,7 +29,7 @@ jobs:
- name: Use CLA approved bot
run: .github/scripts/use-cla-approved-bot.sh
- - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: otelbot-token
with:
app-id: ${{ vars.OTELBOT_APP_ID }}
@@ -48,6 +48,15 @@ jobs:
git checkout -b $branch
git cherry-pick $commit
+
+ if git diff --name-only HEAD~1 HEAD | grep -q '^\.github/workflows/'; then
+ echo "::error::This PR contains changes to workflow files (.github/workflows/)."
+ echo "::error::Workflow files cannot be automatically backported because the standard"
+ echo "::error::GitHub token doesn't have the required 'workflow' write permission."
+ echo "::error::Please backport this PR manually."
+ exit 1
+ fi
+
git push --set-upstream origin $branch
gh pr create --title "[$GITHUB_REF_NAME] $title" \
--body "Clean cherry-pick of #$NUMBER to the \`$GITHUB_REF_NAME\` branch." \
diff --git a/.github/workflows/build-common.yml b/.github/workflows/build-common.yml
new file mode 100644
index 000000000..20ea2f153
--- /dev/null
+++ b/.github/workflows/build-common.yml
@@ -0,0 +1,162 @@
+name: Reusable - Common
+
+on:
+ workflow_call:
+ inputs:
+ cache-read-only:
+ type: boolean
+ required: false
+ no-build-cache:
+ type: boolean
+ required: false
+ max-test-retries:
+ type: number
+ required: false
+ default: 0
+
+permissions:
+ contents: read
+
+jobs:
+ spotless:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - name: Set up JDK for running Gradle
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up Gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+ with:
+ cache-read-only: ${{ inputs.cache-read-only }}
+
+ - name: Spotless
+ run: ./gradlew spotlessCheck ${{ inputs.no-build-cache && '--no-build-cache' || '' }}
+
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - name: Set up JDK for running Gradle
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up Gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+ with:
+ cache-read-only: ${{ inputs.cache-read-only }}
+
+ - name: Build
+ run: ./gradlew build -x spotlessCheck -x test ${{ inputs.no-build-cache && '--no-build-cache' || '' }}
+
+ - name: Check for jApiCmp diffs
+ # The jApiCmp diff compares current to latest, which isn't appropriate for release branches
+ if: ${{ !startsWith(github.ref_name, 'release/') && !startsWith(github.base_ref, 'release/') }}
+ run: |
+ # need to "git add" in case any generated files did not already exist
+ git add docs/apidiffs
+ if git diff --cached --quiet
+ then
+ echo "No diff detected."
+ else
+ echo "Diff detected - did you run './gradlew jApiCmp'?"
+ git diff --cached --name-only
+ git diff --cached
+ exit 1
+ fi
+
+ test:
+ name: Test
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os:
+ - ubuntu-latest
+ - windows-latest
+ test-java-version:
+ - 8
+ - 11
+ - 17
+ - 21
+ - 25 # renovate: datasource=java-version
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - id: setup-java-test
+ name: Set up Java ${{ matrix.test-java-version }} for tests
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: ${{ matrix.test-java-version }}
+
+ - id: setup-java
+ name: Set up Java for build
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up Gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+ with:
+ cache-read-only: ${{ inputs.cache-read-only }}
+
+ - name: Test
+ run: >
+ ./gradlew test
+ "-PtestJavaVersion=${{ matrix.test-java-version }}"
+ "-Porg.gradle.java.installations.paths=${{ steps.setup-java-test.outputs.path }}"
+ "-Porg.gradle.java.installations.auto-download=false"
+ "-PmaxTestRetries=${{ inputs.max-test-retries }}"
+ ${{ inputs.no-build-cache && '--no-build-cache' || '' }}
+
+ - name: Build scan
+ if: ${{ !cancelled() && hashFiles('build-scan.txt') != '' }}
+ run: cat build-scan.txt
+
+ integration-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - name: Set up JDK for running Gradle
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up Gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+ with:
+ cache-read-only: ${{ inputs.cache-read-only }}
+
+ - name: Integration test
+ run: ./gradlew integrationTest "-PmaxTestRetries=${{ inputs.max-test-retries }}" ${{ inputs.no-build-cache && '--no-build-cache' || '' }}
+
+ - name: Build scan
+ if: ${{ !cancelled() && hashFiles('build-scan.txt') != '' }}
+ run: cat build-scan.txt
+
+ - name: Save integration test results
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+ if: always()
+ with:
+ name: integration-test-results
+ path: jmx-metrics/build/reports/tests/integrationTest
+
+ markdown-lint-check:
+ uses: ./.github/workflows/reusable-markdown-lint.yml
+
+ misspell-check:
+ uses: ./.github/workflows/reusable-misspell-check.yml
+
+ shell-script-check:
+ uses: ./.github/workflows/reusable-shell-script-check.yml
diff --git a/.github/workflows/build-daily.yml b/.github/workflows/build-daily.yml
new file mode 100644
index 000000000..7704cab52
--- /dev/null
+++ b/.github/workflows/build-daily.yml
@@ -0,0 +1,35 @@
+name: Build Daily
+
+on:
+ workflow_dispatch:
+ schedule:
+ # Run daily at 7:30 AM UTC
+ - cron: '30 7 * * *'
+
+permissions:
+ contents: read
+
+jobs:
+ common:
+ uses: ./.github/workflows/build-common.yml
+ with:
+ no-build-cache: true
+
+ link-check:
+ uses: ./.github/workflows/reusable-link-check.yml
+
+ workflow-notification:
+ permissions:
+ contents: read
+ issues: write
+ if: always()
+ needs:
+ - common
+ - link-check
+ uses: ./.github/workflows/reusable-workflow-notification.yml
+ with:
+ success: >-
+ ${{
+ needs.common.result == 'success' &&
+ needs.link-check.result == 'success'
+ }}
diff --git a/.github/workflows/build-pull-request.yml b/.github/workflows/build-pull-request.yml
new file mode 100644
index 000000000..d41d87f5f
--- /dev/null
+++ b/.github/workflows/build-pull-request.yml
@@ -0,0 +1,34 @@
+name: Build Pull Request
+
+on:
+ pull_request:
+ merge_group:
+
+permissions:
+ contents: read
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ common:
+ uses: ./.github/workflows/build-common.yml
+ with:
+ cache-read-only: true
+ # retry in merge queue to avoid unnecessary failures
+ max-test-retries: ${{ github.event_name == 'merge_group' && 5 || 0 }}
+
+ link-check:
+ uses: ./.github/workflows/reusable-link-check.yml
+
+ required-status-check:
+ if: always()
+ needs:
+ - common
+ - link-check # wait for link check to complete, but don't require it to pass for merging
+ runs-on: ubuntu-latest
+ steps:
+ # The reusable workflow success depends on all its jobs passing
+ - if: needs.common.result != 'success'
+ run: exit 1 # fail
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 787afe869..c16ad6cd8 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -5,176 +5,40 @@ on:
branches:
- main
- release/*
- pull_request:
- merge_group:
- workflow_dispatch:
permissions:
contents: read
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
- cancel-in-progress: true
-
jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
- with:
- distribution: temurin
- java-version: 17
-
- - name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- with:
- cache-read-only: ${{ github.event_name == 'pull_request' }}
- - name: Gradle build and test
- run: ./gradlew build -x test
-
- test:
- name: test (${{ matrix.test-java-version }})
- runs-on: ubuntu-latest
- strategy:
- matrix:
- test-java-version:
- - 8
- - 11
- - 17
- - 21
- - 23
- fail-fast: false
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - id: setup-test-java
- name: Set up JDK ${{ matrix.test-java-version }} for running tests
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
- with:
- # using zulu because new releases get published quickly
- distribution: zulu
- java-version: ${{ matrix.test-java-version }}
-
- - name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
- with:
- distribution: temurin
- java-version: 17
-
- - name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- with:
- cache-read-only: ${{ github.event_name == 'pull_request' }}
- - name: Gradle test
- run: >
- ./gradlew test
- -PtestJavaVersion=${{ matrix.test-java-version }}
- -Porg.gradle.java.installations.paths=${{ steps.setup-test-java.outputs.path }}
- -Porg.gradle.java.installations.auto-download=false
-
- integration-test:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
- with:
- distribution: temurin
- java-version: 17
-
- - name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- with:
- cache-read-only: ${{ github.event_name == 'pull_request' }}
+ common:
+ uses: ./.github/workflows/build-common.yml
- - name: Integration test
- run: ./gradlew integrationTest
-
- - name: Save integration test results
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- if: always()
- with:
- name: integration-test-results
- path: jmx-metrics/build/reports/tests/integrationTest
-
- markdown-link-check:
- uses: ./.github/workflows/reusable-markdown-link-check.yml
-
- markdown-lint-check:
- uses: ./.github/workflows/reusable-markdown-lint.yml
-
- misspell-check:
- uses: ./.github/workflows/reusable-misspell-check.yml
-
- shell-script-check:
- uses: ./.github/workflows/reusable-shell-script-check.yml
+ # Link check is disabled for push events to avoid unnecessary CI failures
+ # (these failures will instead be captured by the daily scheduled run)
+ # and for release branches to avoid unnecessary maintenance if external links break
publish-snapshots:
- # the condition is on the steps below instead of here on the job, because skipping the job
- # causes the job to show up as canceled in the GitHub UI which prevents the PR build section
- # from collapsing when everything (else) is green
- #
- # and the name is updated when the steps below are skipped which makes what's happening clearer
- # in the GitHub UI
- #
- # note: the condition below has to be written so that '' is last since it resolves to false
- # and so would not short-circuit if used in the second-last position
- name: publish-snapshots${{ (github.ref_name != 'main' || github.repository != 'open-telemetry/opentelemetry-java-contrib') && ' (skipped)' || '' }}
needs:
- # intentionally not blocking snapshot publishing on markdown-link-check or misspell-check
- - build
- - integration-test
+ - common
runs-on: ubuntu-latest
+ # skipping release branches because the versions in those branches are not snapshots
+ if: github.ref_name == 'main' && github.repository == 'open-telemetry/opentelemetry-java-contrib'
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
with:
distribution: temurin
java-version: 17
- name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- # skipping release branches because the versions in those branches are not snapshots
- # (also this skips pull requests)
- if: ${{ github.ref_name == 'main' && github.repository == 'open-telemetry/opentelemetry-java-contrib' }}
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+
- name: Build and publish snapshots
- if: ${{ github.ref_name == 'main' && github.repository == 'open-telemetry/opentelemetry-java-contrib' }}
run: ./gradlew assemble publishToSonatype
env:
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
SONATYPE_KEY: ${{ secrets.SONATYPE_KEY }}
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
GPG_PASSWORD: ${{ secrets.GPG_PASSWORD }}
-
- required-status-check:
- if: (github.event_name == 'pull_request' || github.event_name == 'merge_group') && always()
- needs:
- - build
- - test
- - integration-test
- - markdown-lint-check
- - misspell-check
- - shell-script-check
- runs-on: ubuntu-latest
- steps:
- # only the build and test checks are required for release branch PRs in order
- # to avoid any unnecessary release branch maintenance (especially for patches)
- - if: |
- needs.build.result != 'success' ||
- needs.test.result != 'success' ||
- needs.integration-test.result != 'success' ||
- (
- !startsWith(github.base_ref, 'release/') &&
- (
- needs.markdown-lint-check.result != 'success' ||
- needs.misspell-check.result != 'success' ||
- needs.shell-script-check.result != 'success'
- )
- )
- run: exit 1 # fail
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index ff49bce2f..3b845ff19 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -1,17 +1,20 @@
name: CodeQL
on:
- push:
+ pull_request:
branches:
- main
- release/*
- pull_request:
# TODO (trask) adding this to the merge queue causes the merge queue to fail
# see related issues
# - https://github.com/github/codeql-action/issues/1572
# - https://github.com/github/codeql-action/issues/1537
# - https://github.com/github/codeql-action/issues/2691
# merge_group:
+ push:
+ branches:
+ - main
+ - release/*
schedule:
- cron: "29 13 * * 2" # weekly at 13:29 UTC on Tuesday
@@ -20,36 +23,48 @@ permissions:
jobs:
analyze:
+ name: Analyze (${{ matrix.language }})
permissions:
contents: read
actions: read # for github/codeql-action/init to get workflow details
security-events: write # for github/codeql-action/analyze to upload SARIF results
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - language: actions
+ - language: java
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Java 17
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+ if: matrix.language == 'java'
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
with:
distribution: temurin
java-version: 17
- name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
+ if: matrix.language == 'java'
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
- name: Initialize CodeQL
- uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15
+ uses: github/codeql-action/init@a8d1ac45b9a34d11fe398d5503176af0d06b303e # v3.30.7
with:
- languages: java, actions
- # using "latest" helps to keep up with the latest Kotlin support
+ languages: ${{ matrix.language }}
+ # using "linked" helps to keep up with the latest Kotlin support
# see https://github.com/github/codeql-action/issues/1555#issuecomment-1452228433
- tools: latest
+ tools: linked
- name: Assemble
+ if: matrix.language == 'java'
# --no-build-cache is required for codeql to analyze all modules
# --no-daemon is required for codeql to observe the compilation
# (see https://docs.github.com/en/code-security/codeql-cli/getting-started-with-the-codeql-cli/preparing-your-code-for-codeql-analysis#specifying-build-commands)
run: ./gradlew assemble --no-build-cache --no-daemon
- name: Perform CodeQL analysis
- uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15
+ uses: github/codeql-action/analyze@a8d1ac45b9a34d11fe398d5503176af0d06b303e # v3.30.7
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml
new file mode 100644
index 000000000..e167d7c5e
--- /dev/null
+++ b/.github/workflows/copilot-setup-steps.yml
@@ -0,0 +1,34 @@
+# Custom setup steps for GitHub Copilot coding agent to speed up Copilot's work on coding tasks
+name: "Copilot Setup Steps"
+
+on:
+ pull_request:
+ paths:
+ - .github/workflows/copilot-setup-steps.yml
+ push:
+ paths:
+ - .github/workflows/copilot-setup-steps.yml
+ workflow_dispatch:
+
+permissions:
+ contents: read
+
+jobs:
+ copilot-setup-steps: # Job name required by GitHub Copilot coding agent
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+ - name: Set up JDK for running Gradle
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+
+ - name: Build project and download dependencies
+ run: ./gradlew build -x test
diff --git a/.github/workflows/fossa.yml b/.github/workflows/fossa.yml
index 03d4e5684..4f2c7d5d6 100644
--- a/.github/workflows/fossa.yml
+++ b/.github/workflows/fossa.yml
@@ -12,9 +12,9 @@ jobs:
fossa:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- - uses: fossas/fossa-action@c0a7d013f84c8ee5e910593186598625513cc1e4 # v1.6.0
+ - uses: fossas/fossa-action@3ebcea1862c6ffbd5cf1b4d0bd6b3fe7bd6f2cac # v1.7.0
with:
api-key: ${{secrets.FOSSA_API_KEY}}
team: OpenTelemetry
diff --git a/.github/workflows/gradle-wrapper-validation.yml b/.github/workflows/gradle-wrapper-validation.yml
index 658fbac69..d2e124e7d 100644
--- a/.github/workflows/gradle-wrapper-validation.yml
+++ b/.github/workflows/gradle-wrapper-validation.yml
@@ -15,6 +15,6 @@ jobs:
gradle-wrapper-validation:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- - uses: gradle/actions/wrapper-validation@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
+ - uses: gradle/actions/wrapper-validation@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
diff --git a/.github/workflows/issue-management-feedback-label.yml b/.github/workflows/issue-management-feedback-label.yml
index 35fa82926..49db5efb5 100644
--- a/.github/workflows/issue-management-feedback-label.yml
+++ b/.github/workflows/issue-management-feedback-label.yml
@@ -12,12 +12,13 @@ jobs:
permissions:
contents: read
issues: write
+ pull-requests: write
if: >
contains(github.event.issue.labels.*.name, 'needs author feedback') &&
github.event.comment.user.login == github.event.issue.user.login
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Remove labels
env:
diff --git a/.github/workflows/issue-management-stale-action.yml b/.github/workflows/issue-management-stale-action.yml
index 483df9b15..1cb3de9a5 100644
--- a/.github/workflows/issue-management-stale-action.yml
+++ b/.github/workflows/issue-management-stale-action.yml
@@ -4,6 +4,7 @@ on:
schedule:
# hourly at minute 23
- cron: "23 * * * *"
+ workflow_dispatch:
permissions:
contents: read
@@ -16,21 +17,52 @@ jobs:
pull-requests: write # for actions/stale to close stale PRs
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
+ # Action #1: Handle issues/PRs awaiting author feedback
+ # - After 7 days inactive: Adds "stale" label + warning comment
+ # - After 7 more days inactive: Closes
+ - uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
with:
- repo-token: ${{ secrets.GITHUB_TOKEN }}
+ only-labels: "needs author feedback"
days-before-stale: 7
days-before-close: 7
- only-labels: "needs author feedback"
stale-issue-label: stale
stale-issue-message: >
- This has been automatically marked as stale because it has been marked
- as needing author feedback and has not had any activity for 7 days.
- It will be closed automatically if there is no response from the author
- within 7 additional days from this comment.
+ This issue has been labeled as stale due to lack of activity and needing author feedback.
+ It will be automatically closed if there is no further activity over the next 7 days.
+ stale-pr-label: stale
+ stale-pr-message: >
+ This PR has been labeled as stale due to lack of activity and needing author feedback.
+ It will be automatically closed if there is no further activity over the next 7 days.
+
+ # Action #2: Close old enhancement requests
+ # - Targets: Issues with "enhancement" label (but NOT "needs author feedback")
+ # - After 365 days inactive: Adds "stale" label + closes immediately (no warning period)
+ # - Skips: Issues with "needs author feedback" to avoid conflicts with Action #1
+ - uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+ with:
+ only-labels: "enhancement"
+ # Skip issues that need author feedback (handled by the first action with 7+7 day policy)
+ exempt-issue-labels: "needs author feedback"
+ days-before-pr-stale: -1
+ days-before-pr-close: -1
+ days-before-issue-stale: 365
+ days-before-issue-close: 0
+ stale-issue-label: stale
+ close-issue-message: >
+ Since there has been no activity on this enhancement for the past year we are closing it to help maintain our backlog.
+ Anyone who would like to work on it is still welcome to do so, and we can re-open it at that time.
+
+ # Action #3: Handle stale PRs
+ # - After 90 days inactive: Adds "stale" label + warning comment
+ # - After 14 more days inactive: Closes
+ - uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+ with:
+ days-before-issue-stale: -1
+ days-before-issue-close: -1
+ days-before-pr-stale: 90
+ days-before-pr-close: 14
stale-pr-label: stale
stale-pr-message: >
- This has been automatically marked as stale because it has been marked
- as needing author feedback and has not had any activity for 7 days.
- It will be closed automatically if there is no response from the author
- within 7 additional days from this comment.
+ This PR has been labeled as stale due to lack of activity.
+ It will be automatically closed if there is no further activity over the next 14 days.
+ exempt-draft-pr: false
diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml
index 886c27e3e..c7fc5ab9d 100644
--- a/.github/workflows/ossf-scorecard.yml
+++ b/.github/workflows/ossf-scorecard.yml
@@ -19,12 +19,22 @@ jobs:
# Needed for GitHub OIDC token if publish_results is true
id-token: write
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- - uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ id: create-token
with:
+ # analyzing classic branch protections requires a token with admin read permissions
+ # see https://github.com/ossf/scorecard-action/blob/main/docs/authentication/fine-grained-auth-token.md
+ # and https://github.com/open-telemetry/community/issues/2769
+ app-id: ${{ vars.OSSF_SCORECARD_APP_ID }}
+ private-key: ${{ secrets.OSSF_SCORECARD_PRIVATE_KEY }}
+
+ - uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3
+ with:
+ repo_token: ${{ steps.create-token.outputs.token }}
results_file: results.sarif
results_format: sarif
publish_results: true
@@ -42,6 +52,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard (optional).
# Commenting out will disable upload of results to your repo's Code Scanning dashboard
- name: "Upload to code-scanning"
- uses: github/codeql-action/upload-sarif@45775bd8235c68ba998cffa5171334d58593da47 # v3.28.15
+ uses: github/codeql-action/upload-sarif@a8d1ac45b9a34d11fe398d5503176af0d06b303e # v3.30.7
with:
sarif_file: results.sarif
diff --git a/.github/workflows/owasp-dependency-check-daily.yml b/.github/workflows/owasp-dependency-check-daily.yml
index dd6708163..c8ddc6b6c 100644
--- a/.github/workflows/owasp-dependency-check-daily.yml
+++ b/.github/workflows/owasp-dependency-check-daily.yml
@@ -15,10 +15,10 @@ jobs:
analyze:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
with:
distribution: temurin
java-version: 17
@@ -27,7 +27,7 @@ jobs:
run: |
sed -i "s/org.gradle.jvmargs=/org.gradle.jvmargs=-Xmx3g /" gradle.properties
- - uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
+ - uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
- run: ./gradlew dependencyCheckAnalyze
env:
diff --git a/.github/workflows/prepare-patch-release.yml b/.github/workflows/prepare-patch-release.yml
index 050fc5a7f..70bfd208a 100644
--- a/.github/workflows/prepare-patch-release.yml
+++ b/.github/workflows/prepare-patch-release.yml
@@ -11,7 +11,7 @@ jobs:
contents: write # for Git to git push
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- run: |
if [[ ! $GITHUB_REF_NAME =~ ^release/v[0-9]+\.[0-9]+\.x$ ]]; then
@@ -47,7 +47,7 @@ jobs:
- name: Use CLA approved bot
run: .github/scripts/use-cla-approved-bot.sh
- - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: otelbot-token
with:
app-id: ${{ vars.OTELBOT_APP_ID }}
diff --git a/.github/workflows/prepare-release-branch.yml b/.github/workflows/prepare-release-branch.yml
index 1e2a00c60..58c5dc8bb 100644
--- a/.github/workflows/prepare-release-branch.yml
+++ b/.github/workflows/prepare-release-branch.yml
@@ -9,7 +9,7 @@ jobs:
prereqs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Verify prerequisites
run: |
@@ -30,7 +30,7 @@ jobs:
needs:
- prereqs
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Create release branch
run: |
@@ -59,7 +59,7 @@ jobs:
- name: Use CLA approved bot
run: .github/scripts/use-cla-approved-bot.sh
- - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: otelbot-token
with:
app-id: ${{ vars.OTELBOT_APP_ID }}
@@ -87,7 +87,7 @@ jobs:
needs:
- prereqs
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set environment variables
run: |
@@ -116,7 +116,7 @@ jobs:
- name: Use CLA approved bot
run: .github/scripts/use-cla-approved-bot.sh
- - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: otelbot-token
with:
app-id: ${{ vars.OTELBOT_APP_ID }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 244b009d5..3ef09e0a2 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,64 +1,25 @@
name: Release
on:
workflow_dispatch:
+ inputs:
+ already-published:
+ description: 'Skip publishing, download artifacts from Maven Central instead'
+ default: false
+ type: boolean
permissions:
contents: read
jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
- with:
- distribution: temurin
- java-version: 17
-
- - name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- - name: Gradle build
- run: ./gradlew build
-
- - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- name: Save unit test results
- if: always()
- with:
- name: test-results
- path: jmx-metrics/build/reports/tests/test
-
- integration-test:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
- with:
- distribution: temurin
- java-version: 17
-
- - name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- - name: Integration test
- run: ./gradlew integrationTest
-
- - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- name: Save integration test results
- if: always()
- with:
- name: integration-test-results
- path: jmx-metrics/build/reports/tests/integrationTest
+ common:
+ uses: ./.github/workflows/build-common.yml
release:
permissions:
contents: write # for creating the release
runs-on: ubuntu-latest
needs:
- - build
- - integration-test
+ - common
outputs:
version: ${{ steps.create-github-release.outputs.version }}
steps:
@@ -68,7 +29,7 @@ jobs:
exit 1
fi
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set environment variables
run: |
@@ -97,7 +58,7 @@ jobs:
# check out main branch to verify there won't be problems with merging the change log
# at the end of this workflow
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: main
@@ -112,20 +73,22 @@ jobs:
fi
# back to the release branch
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# tags are needed for the generate-release-contributors.sh script
fetch-depth: 0
- name: Set up JDK for running Gradle
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
with:
distribution: temurin
java-version: 17
- name: Set up gradle
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+
- name: Build and publish artifacts
+ if: ${{ !inputs.already-published }}
run: ./gradlew assemble publishToSonatype closeAndReleaseSonatypeStagingRepository
env:
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
@@ -133,6 +96,21 @@ jobs:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
GPG_PASSWORD: ${{ secrets.GPG_PASSWORD }}
+ - name: Download artifacts from Maven Central (when already published)
+ if: ${{ inputs.already-published }}
+ run: |
+ mkdir -p jmx-metrics/build/libs
+ mkdir -p jmx-scraper/build/libs
+
+ curl -L -o jmx-metrics/build/libs/opentelemetry-jmx-metrics-$VERSION-alpha.jar \
+ "https://repo1.maven.org/maven2/io/opentelemetry/contrib/opentelemetry-jmx-metrics/$VERSION-alpha/opentelemetry-jmx-metrics-$VERSION-alpha.jar"
+ curl -L -o jmx-metrics/build/libs/opentelemetry-jmx-metrics-$VERSION-alpha.jar.asc \
+ "https://repo1.maven.org/maven2/io/opentelemetry/contrib/opentelemetry-jmx-metrics/$VERSION-alpha/opentelemetry-jmx-metrics-$VERSION-alpha.jar.asc"
+ curl -L -o jmx-scraper/build/libs/opentelemetry-jmx-scraper-$VERSION-alpha.jar \
+ "https://repo1.maven.org/maven2/io/opentelemetry/contrib/opentelemetry-jmx-scraper/$VERSION-alpha/opentelemetry-jmx-scraper-$VERSION-alpha.jar"
+ curl -L -o jmx-scraper/build/libs/opentelemetry-jmx-scraper-$VERSION-alpha.jar.asc \
+ "https://repo1.maven.org/maven2/io/opentelemetry/contrib/opentelemetry-jmx-scraper/$VERSION-alpha/opentelemetry-jmx-scraper-$VERSION-alpha.jar.asc"
+
- name: Generate release notes
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -181,23 +159,30 @@ jobs:
run: |
cp jmx-metrics/build/libs/opentelemetry-jmx-metrics-$VERSION-alpha.jar opentelemetry-jmx-metrics.jar
cp jmx-metrics/build/libs/opentelemetry-jmx-metrics-$VERSION-alpha.jar.asc opentelemetry-jmx-metrics.jar.asc
+ cp jmx-scraper/build/libs/opentelemetry-jmx-scraper-$VERSION-alpha.jar opentelemetry-jmx-scraper.jar
+ cp jmx-scraper/build/libs/opentelemetry-jmx-scraper-$VERSION-alpha.jar.asc opentelemetry-jmx-scraper.jar.asc
+
gh release create --target $GITHUB_REF_NAME \
--title "Version $VERSION" \
--notes-file /tmp/release-notes.txt \
v$VERSION \
opentelemetry-jmx-metrics.jar \
- opentelemetry-jmx-metrics.jar.asc
+ opentelemetry-jmx-metrics.jar.asc \
+ opentelemetry-jmx-scraper.jar \
+ opentelemetry-jmx-scraper.jar.asc
echo "version=$VERSION" >> $GITHUB_OUTPUT
- merge-change-log-to-main:
+ post-release-updates:
permissions:
contents: write # for git push to PR branch
runs-on: ubuntu-latest
needs:
- release
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ # add change log sync (if any) into this PR since the apidiff update
+ # is required before any other PR can be merged anyway
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Copy change log section from release branch
env:
@@ -206,7 +191,7 @@ jobs:
sed -n "0,/^## Version $VERSION /d;/^## Version /q;p" CHANGELOG.md \
> /tmp/changelog-section.md
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: main
@@ -218,10 +203,41 @@ jobs:
release_date=$(gh release view v$VERSION --json publishedAt --jq .publishedAt | sed 's/T.*//')
RELEASE_DATE=$release_date .github/scripts/merge-change-log-after-release.sh
+ - name: Wait for release to be available in maven central
+ env:
+ VERSION: ${{ needs.release.outputs.version }}
+ run: |
+ until curl --silent \
+ --show-error \
+ --output /dev/null \
+ --head \
+ --fail \
+ https://repo1.maven.org/maven2/io/opentelemetry/contrib/opentelemetry-aws-xray/$VERSION/opentelemetry-aws-xray-$VERSION.jar
+ do
+ sleep 60
+ done
+
+ - name: Set up JDK for running Gradle
+ uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
+ with:
+ distribution: temurin
+ java-version: 17
+
+ - name: Set up Gradle
+ uses: gradle/actions/setup-gradle@4d9f0ba0025fe599b4ebab900eb7f3a1d93ef4c2 # v5.0.0
+
+ - name: Update apidiff baseline
+ env:
+ VERSION: ${{ needs.release.outputs.version }}
+ PRIOR_VERSION: ${{ needs.release.outputs.prior-version }}
+ run: |
+ ./gradlew japicmp -PapiBaseVersion=$PRIOR_VERSION -PapiNewVersion=$VERSION
+ ./gradlew --refresh-dependencies japicmp
+
- name: Use CLA approved bot
run: .github/scripts/use-cla-approved-bot.sh
- - uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2.0.2
+ - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: otelbot-token
with:
app-id: ${{ vars.OTELBOT_APP_ID }}
@@ -233,22 +249,14 @@ jobs:
# not using secrets.GITHUB_TOKEN since pull requests from that token do not run workflows
GH_TOKEN: ${{ steps.otelbot-token.outputs.token }}
run: |
- if git diff --quiet; then
- if [[ $VERSION == *.0 ]]; then
- echo there are no updates to merge, not creating pull request
- exit 0 # success
- else
- echo patch release notes did not get applied for some reason
- exit 1 # failure
- fi
- fi
-
- message="Merge change log updates from $GITHUB_REF_NAME"
- body="Merge log updates from \`$GITHUB_REF_NAME\`."
- branch="otelbot/merge-change-log-updates-from-${GITHUB_REF_NAME//\//-}"
+ message="Post-release updates for $VERSION"
+ body="Post-release updates for \`$VERSION\`."
+ branch="otelbot/update-apidiff-baseline-to-released-version-${VERSION}"
git checkout -b $branch
- git commit -a -m "$message"
+ git add CHANGELOG.md
+ git add docs/apidiffs
+ git commit -m "$message"
git push --set-upstream origin $branch
gh pr create --title "$message" \
--body "$body" \
diff --git a/.github/workflows/reusable-link-check.yml b/.github/workflows/reusable-link-check.yml
new file mode 100644
index 000000000..d778dea64
--- /dev/null
+++ b/.github/workflows/reusable-link-check.yml
@@ -0,0 +1,28 @@
+name: Reusable - Link check
+
+on:
+ workflow_call:
+
+permissions:
+ contents: read
+
+jobs:
+ link-check:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ with:
+ fetch-depth: 0 # needed for merge-base used in lint:links-in-modified-files
+
+ - uses: jdx/mise-action@e3d7b8d67a7958d1207f6ed871e83b1ea780e7b0 # v3.3.1
+
+ - name: Link check - relative links (all files)
+ if: github.event_name == 'pull_request'
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: mise run lint:local-links
+
+ - name: Link check (modified files only)
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: mise run lint:links-in-modified-files --base origin/${{ github.base_ref }} --head ${{ github.event.pull_request.head.sha }} --event ${{ github.event_name }}
diff --git a/.github/workflows/reusable-markdown-link-check.yml b/.github/workflows/reusable-markdown-link-check.yml
deleted file mode 100644
index e8692d6a3..000000000
--- a/.github/workflows/reusable-markdown-link-check.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Reusable - Markdown link check
-
-on:
- workflow_call:
-
-permissions:
- contents: read
-
-jobs:
- markdown-link-check:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - uses: lycheeverse/lychee-action@1d97d84f0bc547f7b25f4c2170d87d810dc2fb2c # v2.4.0
- with:
- # excluding links to pull requests and issues is done for performance
- args: >
- --include-fragments
- --exclude "^https://github.com/open-telemetry/opentelemetry-java-contrib/(issue|pull)/\\d+$"
- --max-retries 6
- .
diff --git a/.github/workflows/reusable-markdown-lint.yml b/.github/workflows/reusable-markdown-lint.yml
index f3688c191..83fc58586 100644
--- a/.github/workflows/reusable-markdown-lint.yml
+++ b/.github/workflows/reusable-markdown-lint.yml
@@ -10,8 +10,8 @@ jobs:
markdown-lint-check:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Run markdownlint
run: |
- npx markdownlint-cli@0.44.0 -c .github/config/markdownlint.yml **/*.md
+ npx markdownlint-cli@0.45.0 -c .github/config/markdownlint.yml **/*.md
diff --git a/.github/workflows/reusable-misspell-check.yml b/.github/workflows/reusable-misspell-check.yml
index b1e266cdf..7e8ddaf77 100644
--- a/.github/workflows/reusable-misspell-check.yml
+++ b/.github/workflows/reusable-misspell-check.yml
@@ -10,7 +10,7 @@ jobs:
misspell-check:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install misspell
run: |
diff --git a/.github/workflows/reusable-shell-script-check.yml b/.github/workflows/reusable-shell-script-check.yml
index 4e5f51e0f..b2e403110 100644
--- a/.github/workflows/reusable-shell-script-check.yml
+++ b/.github/workflows/reusable-shell-script-check.yml
@@ -10,7 +10,7 @@ jobs:
shell-script-check:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install shell check
run: wget -qO- "https://github.com/koalaman/shellcheck/releases/download/stable/shellcheck-stable.linux.x86_64.tar.xz" | tar -xJv
diff --git a/.github/workflows/reusable-workflow-notification.yml b/.github/workflows/reusable-workflow-notification.yml
index 701f90f5a..61e8d6267 100644
--- a/.github/workflows/reusable-workflow-notification.yml
+++ b/.github/workflows/reusable-workflow-notification.yml
@@ -19,7 +19,7 @@ jobs:
issues: write
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Open issue or add comment if issue already open
env:
diff --git a/.gitignore b/.gitignore
index 641751d61..4743c99e3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,3 +53,6 @@ bin
.swp
.gitpod.yml
+
+# Gradle build scans
+build-scan.txt
diff --git a/.mise/tasks/lint/.shellcheckrc b/.mise/tasks/lint/.shellcheckrc
new file mode 100644
index 000000000..c186fb835
--- /dev/null
+++ b/.mise/tasks/lint/.shellcheckrc
@@ -0,0 +1,3 @@
+# shellcheck configuration for mise tasks
+# SC2154: usage_* variables are set by mise framework
+disable=SC2154
diff --git a/.mise/tasks/lint/links-in-modified-files.sh b/.mise/tasks/lint/links-in-modified-files.sh
new file mode 100755
index 000000000..524cc2d28
--- /dev/null
+++ b/.mise/tasks/lint/links-in-modified-files.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#MISE description="Lint links in modified files"
+
+set -e
+
+#USAGE flag "--base " help="base branch to compare against (default: origin/main)" default="origin/main"
+#USAGE flag "--head " help="head branch to compare against (empty for local changes) (default: empty)" default=""
+#USAGE flag "--event " help="event name (default: pull_request)" default="pull_request"
+
+if [ "$usage_head" = "''" ]; then
+ usage_head=""
+fi
+
+# Check if lychee config was modified
+# shellcheck disable=SC2086
+# - because usage_head may be empty
+config_modified=$(git diff --name-only --merge-base "$usage_base" $usage_head \
+ | grep -E '^(\.github/config/lychee\.toml|\.mise/tasks/lint/.*|mise\.toml)$' || true)
+
+if [ "$usage_event" != "pull_request" ] ; then
+ echo "Not a PR - checking all files."
+ mise run lint:links
+elif [ -n "$config_modified" ] ; then
+ echo "config changes, checking all files."
+ mise run lint:links
+else
+ # Using lychee's default extension filter here to match when it runs against all files
+ # Note: --diff-filter=d filters out deleted files
+ # shellcheck disable=SC2086
+ # - because usage_head may be empty
+ modified_files=$(git diff --name-only --diff-filter=d "$usage_base" $usage_head \
+ | grep -E '\.(md|mkd|mdx|mdown|mdwn|mkdn|mkdown|markdown|html|htm|txt)$' \
+ | tr '\n' ' ' || true)
+
+ if [ -z "$modified_files" ]; then
+ echo "No modified files, skipping link linting."
+ exit 0
+ fi
+
+ # shellcheck disable=SC2086
+ mise run lint:links $modified_files
+fi
+
diff --git a/.mise/tasks/lint/links.sh b/.mise/tasks/lint/links.sh
new file mode 100755
index 000000000..f5f708535
--- /dev/null
+++ b/.mise/tasks/lint/links.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+#MISE description="Lint links in all files"
+
+set -e
+
+#USAGE arg "" var=#true help="files to check" default="."
+
+for f in $usage_file; do
+ echo "Checking links in file: $f"
+done
+
+# shellcheck disable=SC2086
+lychee --verbose --config .github/config/lychee.toml $usage_file
diff --git a/.mise/tasks/lint/local-links.sh b/.mise/tasks/lint/local-links.sh
new file mode 100755
index 000000000..f16cd3aa5
--- /dev/null
+++ b/.mise/tasks/lint/local-links.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+#MISE description="Lint links in local files"
+
+set -e
+
+#USAGE arg "" var=#true help="files to check" default="."
+
+for f in $usage_file; do
+ echo "Checking links in file: $f"
+done
+
+# shellcheck disable=SC2086
+lychee --verbose --scheme file --include-fragments $usage_file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ca4f7b3ad..1576667f0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,184 @@
## Unreleased
+## Version 1.50.0 (2025-09-26)
+
+Note: This release broadly applies some style guidelines across the repository. As a result,
+some classes that were previously visible may now be package-private, and other classes that
+were previously non-final may now be final. See
+[#2182](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2182),
+[#2210](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2210),
+[#2212](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2212), and
+[#2213](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2213)
+for examples and details. These changes are not expected to break anyone, so please open
+an issue if this causes problems.
+
+### Baggage processor
+
+- Move baggage processor to the front of the processor list
+ ([#2152](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2152))
+- Add declarative configuration support
+ ([#2031](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2031))
+
+### Disk buffering
+
+- Catching IllegalStateException in case of failed deserialization
+ ([#2157](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2157))
+- Apply final to public API classes where possible
+ ([#2216](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2216))
+- Handle empty attribute values
+ ([#2268](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2268))
+
+### Inferred spans
+
+- Support dynamically changing the inferred span interval
+ ([#2153](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2153))
+
+### JMX scraper
+
+- Implement stable `service.instance.id`
+ ([#2270](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2270))
+
+### Kafka exporter
+
+- Add Kafka connectivity error handling
+ ([#2202](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2202))
+
+### OpAMP client
+
+- Move important user-facing classes out of 'internal' package
+ ([#2249](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2249))
+- Exponential backoff retries on HTTP connection failures
+ ([#2274](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2274))
+
+### Span stack traces
+
+- Add declarative configuration support
+ ([#2262](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2262))
+
+## Version 1.49.0 (2025-08-25)
+
+### Consistent sampling
+
+- Add updateable threshold sampler for dynamic sampling configuration
+ ([#2137](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2137))
+
+### Disk buffering
+
+- Introduce API changes for improved disk buffering functionality
+ ([#2084](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2084))
+- Implement more efficient serializer with direct disk write capabilities
+ ([#2138](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2138))
+
+### IBM MQ metrics - New 🌟
+
+IBM MQ metrics collection utility.
+
+### Inferred spans
+
+- Update async profiler to version 4.1 for improved performance
+ ([#2096](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2096))
+
+### OpAMP client - New 🌟
+
+Open Agent Management Protocol (OpAMP) client implementation.
+
+## Version 1.48.0 (2025-07-23)
+
+### AWS resources
+
+- Support for declarative configuration
+ ([#2014](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2014))
+
+### AWS X-Ray SDK support
+
+- Update SamplerRulesApplier to recognize new HTTP/URL semconv
+ ([#1959](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1959))
+
+### Azure resources
+
+- Support for declarative configuration
+ ([#2014](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2014))
+
+### CloudFoundry resources
+
+- Support for declarative configuration
+ ([#2014](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2014))
+
+### Consistent sampling
+
+- Refactor ConsistentFixedThresholdSampler to prepare for dynamic threshold support
+ ([#2018](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2018))
+- ConsistentRateLimitingSampler can fail if used in combination with legacy samplers
+ ([#2022](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2022))
+
+### GCP resources
+
+- Support for declarative configuration
+ ([#2014](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2014))
+
+### JMX metrics
+
+- Deprecate JMX Gatherer and provide migration guide to JMX Scraper
+ ([#2034](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2034))
+
+### JMX scraper
+
+- Update Jetty metrics configuration corresponding to Java Instrumentation 2.18.0
+ ([#2033](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2033))
+- Mark as production-ready and remove experimental status
+ ([#2034](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2034))
+
+### Maven extension
+
+- Support for declarative configuration
+ ([#2014](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2014))
+
+### Resource providers
+
+- Support for declarative configuration
+ ([#2014](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/2014))
+
+## Version 1.47.0 (2025-07-04)
+
+### Disk buffering
+
+- Shared storage
+ ([#1912](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1912))
+- Implementing ExtendedLogRecordData
+ ([#1918](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1918))
+- Add missing EventName to disk-buffering LogRecordDataMapper
+ ([#1950](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1950))
+
+### GCP authentication extension
+
+- Update the internal implementation such that the required headers are retrieved
+ from the Google Auth Library instead of manually constructing and passing them.
+ ([#1860](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1860))
+- Add metrics support to auth extension
+ ([#1891](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1891))
+- Update ConfigurableOptions to read from ConfigProperties
+ ([#1904](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1904))
+
+### Inferred spans
+
+- Upgrade async-profiler to 4.0
+ ([#1872](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1872))
+
+### Kafka exporter
+
+- Upgrade kafka-clients to 4.0 (and so now requires Java 11+)
+ ([#1802](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1802))
+
+### Maven extension
+
+- Add option to record transferred artifacts
+ ([#1875](https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1875))
+
## Version 1.46.0 (2025-04-11)
### Baggage processor
@@ -151,7 +329,7 @@ The extension takes care of the necessary configuration required to authenticate
The future of the [JMX metrics](./jmx-metrics/README.md) component,
built on top of the
-[JMX metrics](https://github.com/open-telemetry/opentelemetry-java-instrumentation/tree/main/instrumentation/jmx-metrics/javaagent#jmx-metric-insight)
+[JMX metrics](https://github.com/open-telemetry/opentelemetry-java-instrumentation/blob/main/instrumentation/jmx-metrics/README.md#jmx-metric-insight)
component from the opentelemetry-java-instrumentation repository.
### Maven extension
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f482397c7..04a50c300 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,82 +1,65 @@
# Contributing
-Welcome to the OpenTelemetry Java Contrib Repository!
+Welcome to the OpenTelemetry Java Contrib repository!
## Introduction
-This repository focuses on providing tools and utilities for Java-based observability, such as remote JMX metric gathering and reporting. We’re excited to have you here! Whether you’re fixing a bug, adding a feature, or suggesting an idea, your contributions are invaluable.
+This repository provides observability libraries and utilities for Java applications that complement
+the [OpenTelemetry Java SDK](https://github.com/open-telemetry/opentelemetry-java) and
+[OpenTelemetry Java Instrumentation](https://github.com/open-telemetry/opentelemetry-java-instrumentation)
+projects.
-Before submitting new features or changes to current functionality, it is recommended to first
-[open an issue](https://github.com/open-telemetry/opentelemetry-java-contrib/issues/new)
-and discuss your ideas or propose the changes you wish to make.
-
-Questions? Ask in the OpenTelemetry [java channel](https://cloud-native.slack.com/archives/C014L2KCTE3)
+Before submitting new features or changes, please consider
+[opening an issue](https://github.com/open-telemetry/opentelemetry-java-contrib/issues/new) first to
+discuss your ideas.
Pull requests for bug fixes are always welcome!
-## Pre-requisites
-
-To work with this repository, ensure you have:
-
-### Tools:
-
-Java 17 or higher
-
-### Platform Notes:
-
-macOS/Linux: Ensure JAVA_HOME is set correctly.
-
-## Workflow
-
-1. Fork the repository
-2. Clone locally
-3. Create a branch before working on an issue
-
-## Local Run/Build
+## Building and Testing
-In order to build and test this whole repository you need JDK 11+.
+While most modules target Java 8, building this project requires Java 17 or higher.
-#### Snapshot builds
-
-For developers testing code changes before a release is complete, there are
-snapshot builds of the `main` branch. They are available from
-the Sonatype OSS snapshots repository at `https://oss.sonatype.org/content/repositories/snapshots/`
-([browse](https://oss.sonatype.org/content/repositories/snapshots/io/opentelemetry/contrib/))
-
-#### Building from source
-
-Building using Java 11+:
+To build the project:
```bash
-$ java -version
+./gradlew assemble
```
+To run the tests:
+
```bash
-$ ./gradlew assemble
+./gradlew test
```
-## Testing
+Some modules include integration tests that can be run with:
```bash
-$ ./gradlew test
+./gradlew integrationTest
```
-### Some modules have integration tests
+## Snapshot Builds
-```
-$ ./gradlew integrationTest
-```
+Snapshot builds of the `main` branch are available from the Sonatype snapshot repository at:
+`https://central.sonatype.com/repository/maven-snapshots/`
+([browse](https://central.sonatype.com/service/rest/repository/browse/maven-snapshots/io/opentelemetry/contrib/)).
+
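+For example (a minimal sketch; the module and version shown are illustrative, not a
+recommendation), a Gradle Kotlin DSL build can consume a snapshot by adding that repository:
+
+```kotlin
+repositories {
+  mavenCentral()
+  // snapshot repository that CI builds of main publish to
+  maven("https://central.sonatype.com/repository/maven-snapshots/")
+}
+
+dependencies {
+  // illustrative coordinate; substitute the contrib module and snapshot version you need
+  implementation("io.opentelemetry.contrib:opentelemetry-aws-xray:1.51.0-alpha-SNAPSHOT")
+}
+```
+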
+## Style Guide
+
+See [Style Guide](docs/style-guide.md).
-Follow the Java Instrumentation [Style Guide](https://github.com/open-telemetry/opentelemetry-java-instrumentation/blob/main/docs/contributing/style-guideline.md) from the opentelemetry-java-instrumentation repository.
+## Pull Request Guidelines
-Failure? Check logs for errors or mismatched dependencies.
+When submitting a pull request, please ensure that you:
-## Gradle conventions
+- Clearly describe the change and its motivation
+- Mention any breaking changes
+- Include tests for new functionality
+- Follow the [Style Guide](docs/style-guide.md)
-- Use kotlin instead of groovy
-- Plugin versions should be specified in `settings.gradle.kts`, not in individual modules
-- All modules use `plugins { id("otel.java-conventions") }`
+## Getting Help
-## Further Help
+If you need assistance or have questions:
-Join [#otel-java](https://cloud-native.slack.com/archives/C014L2KCTE3) on OpenTelemetry Slack
+- Post on the [#otel-java](https://cloud-native.slack.com/archives/C014L2KCTE3) Slack channel
+- [Open an issue](https://github.com/open-telemetry/opentelemetry-java-contrib/issues/new/choose) in
+ this repository
diff --git a/README.md b/README.md
index 33eb10f34..63dd49369 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,7 @@ feature or via instrumentation, this project is hopefully for you.
| alpha | [GCP Authentication Extension](./gcp-auth-extension/README.md) |
| beta | [GCP Resources](./gcp-resources/README.md) |
+| alpha | [IBM MQ Metrics](./ibm-mq-metrics/README.md) |
| beta | [Inferred Spans](./inferred-spans/README.md) |
| alpha | [JFR Connection](./jfr-connection/README.md) |
| alpha | [JFR Events](./jfr-events/README.md) |
| alpha | [JMX Metric Gatherer](./jmx-metrics/README.md) |
@@ -47,56 +48,36 @@ On reaching stable status, the `otel.stable` value in `gradle.properties` should
Note that currently all the libraries are released together with the version of this repo, so breaking changes (after stable
status is reached) would bump the major version of all libraries together. This could get complicated so `stable` has a high bar.
-## Getting Started
-
-```bash
-# Apply formatting
-$ ./gradlew spotlessApply
-
-# Build the complete project
-$ ./gradlew build
-
-# Run integration tests
-$ ./gradlew integrationTest
-
-# Clean artifacts
-$ ./gradlew clean
-```
-
## Contributing
-The Java Contrib project was initially formed to provide methods of easy remote JMX metric gathering and reporting,
-which is actively in development. If you have an idea for a similar use case in the metrics, traces, or logging
-domain we would be very interested in supporting it. Please
-[open an issue](https://github.com/open-telemetry/opentelemetry-java-contrib/issues/new/choose) to share your idea or
-suggestion. PRs are always welcome and greatly appreciated, but for larger functional changes a pre-coding introduction
-can be helpful to ensure this is the correct place and that active or conflicting efforts don't exist.
+See [CONTRIBUTING.md](CONTRIBUTING.md).
-Triagers ([@open-telemetry/java-contrib-triagers](https://github.com/orgs/open-telemetry/teams/java-contrib-triagers)):
+### Maintainers
-- All [component owners](https://github.com/open-telemetry/opentelemetry-java-contrib/blob/main/.github.amrom.workers.devponent_owners.yml) are given Triager permissions to this repository.
+- [Jack Berg](https://github.com/jack-berg), Grafana Labs
+- [Jason Plumb](https://github.com/breedx-splk), Splunk
+- [Lauri Tulmin](https://github.com/laurit), Splunk
+- [Trask Stalnaker](https://github.com/trask), Microsoft
-Approvers ([@open-telemetry/java-contrib-approvers](https://github.com/orgs/open-telemetry/teams/java-contrib-approvers)):
+For more information about the maintainer role, see the [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#maintainer).
-- [John Watson](https://github.com/jkwatson), Verta.ai
+### Approvers
-Maintainers ([@open-telemetry/java-contrib-maintainers](https://github.com/orgs/open-telemetry/teams/java-contrib-maintainers)):
+- [Jay DeLuca](https://github.com/jaydeluca), Grafana Labs
+- [John Watson](https://github.com/jkwatson), Cloudera
-- [Jack Berg](https://github.com/jack-berg), New Relic
-- [Jason Plumb](https://github.com/breedx-splk), Splunk
-- [Lauri Tulmin](https://github.com/laurit), Splunk
-- [Trask Stalnaker](https://github.com/trask), Microsoft
+For more information about the approver role, see the [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#approver).
-Emeritus maintainers:
+### Emeritus maintainers
- [Mateusz Rzeszutek](https://github.com/mateuszrzeszutek)
- [Nikita Salnikov-Tarnovski](https://github.com/iNikem)
- [Ryan Fitzpatrick](https://github.com/rmfitzpatrick)
-Learn more about roles in the [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md).
+For more information about the emeritus role, see the [community repository](https://github.com/open-telemetry/community/blob/main/guides/contributor/membership.md#emeritus-maintainerapprovertriager).
-Thanks to all the people who already contributed!
+### Thanks to all of our contributors!
-
+
diff --git a/RELEASING.md b/RELEASING.md
index afb1b8162..70d044163 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -7,8 +7,8 @@ The version is specified in [version.gradle.kts](version.gradle.kts).
## Snapshot builds
Every successful CI build of the main branch automatically executes `./gradlew publishToSonatype`
-as the last step, which publishes a snapshot build to
-[Sonatype OSS snapshots repository](https://oss.sonatype.org/content/repositories/snapshots/io/opentelemetry/contrib/).
+as the last step, which publishes a snapshot build to the
+[Sonatype snapshot repository](https://central.sonatype.com/service/rest/repository/browse/maven-snapshots/io/opentelemetry/contrib/).
## Release cadence
diff --git a/all/README.md b/all/README.md
deleted file mode 100644
index dca83da7b..000000000
--- a/all/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# opentelemetry-contrib-all (utility project)
-
-This is a utility project which depends on all other projects in this repository.
-We use it for collecting all coverage reports from all modules for uploading to codecov.
diff --git a/all/build.gradle.kts b/all/build.gradle.kts
deleted file mode 100644
index 58f0f6d72..000000000
--- a/all/build.gradle.kts
+++ /dev/null
@@ -1,64 +0,0 @@
-plugins {
- `jacoco-report-aggregation`
-
- id("otel.java-conventions")
-}
-
-description = "OpenTelemetry Contrib All"
-
-dependencies {
- rootProject.subprojects.forEach { subproject ->
- // Generate aggregate coverage report for published modules that enable jacoco.
- subproject.plugins.withId("jacoco") {
- subproject.plugins.withId("maven-publish") {
- // TODO(anuraaga): Figure out how to avoid transitive dependencies being pulled into jacoco due to the use
- // of shadow plugin.
- if (subproject.name != "jmx-metrics") {
- implementation(project(subproject.path)) {
- isTransitive = false
- }
- }
- }
- }
- }
-}
-
-tasks {
- // We don't compile anything here. This project is mostly for
- // aggregating jacoco reports and it doesn't work if this isn't at least as high as the
- // highest supported Java version in any of our projects. Most of our projects target
- // Java 8, but some target Java 11 or 17.
- withType(JavaCompile::class) {
- options.release.set(17)
- }
-}
-
-afterEvaluate {
- tasks {
- testCodeCoverageReport {
- classDirectories.setFrom(
- classDirectories.files.map {
- zipTree(it).filter {
- // Exclude mrjar (jacoco complains), shaded, and generated code
- !it.absolutePath.contains("META-INF/versions/") &&
- !it.absolutePath.contains("AutoValue_")
- }
- },
- )
-
- reports {
- // xml is usually used to integrate code coverage with
- // other tools like SonarQube, Coveralls or Codecov
- xml.required.set(true)
-
- // HTML reports can be used to see code coverage
- // without any external tools
- html.required.set(true)
- }
- }
- }
-}
-
-dependencyCheck {
- skip = true
-}
diff --git a/aws-resources/build.gradle.kts b/aws-resources/build.gradle.kts
index ede0dad59..8c56b17df 100644
--- a/aws-resources/build.gradle.kts
+++ b/aws-resources/build.gradle.kts
@@ -9,6 +9,7 @@ otelJava.moduleName.set("io.opentelemetry.contrib.aws.resource")
dependencies {
api("io.opentelemetry:opentelemetry-api")
+ compileOnly("io.opentelemetry:opentelemetry-api-incubator")
api("io.opentelemetry:opentelemetry-sdk")
implementation("io.opentelemetry.semconv:opentelemetry-semconv")
@@ -20,10 +21,24 @@ dependencies {
implementation("com.squareup.okhttp3:okhttp")
testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure")
+ testImplementation("io.opentelemetry:opentelemetry-sdk-extension-incubator")
+ testImplementation("io.opentelemetry:opentelemetry-api-incubator")
testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
+ testImplementation("io.opentelemetry:opentelemetry-exporter-logging")
testImplementation("com.linecorp.armeria:armeria-junit5")
testRuntimeOnly("org.bouncycastle:bcpkix-jdk15on")
testImplementation("com.google.guava:guava")
testImplementation("org.skyscreamer:jsonassert")
}
+
+tasks {
+ withType<Test>().configureEach {
+ environment(
+ "AWS_REGION" to "us-east-1",
+ "AWS_LAMBDA_FUNCTION_NAME" to "my-function",
+ "AWS_LAMBDA_FUNCTION_VERSION" to "1.2.3"
+ )
+ jvmArgs("-Dotel.experimental.config.file=${project.projectDir.resolve("src/test/resources/declarative-config.yaml")}")
+ }
+}
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/BeanstalkResource.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/BeanstalkResource.java
index 9f294d4fb..7441b7c81 100644
--- a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/BeanstalkResource.java
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/BeanstalkResource.java
@@ -12,6 +12,7 @@
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.SERVICE_INSTANCE_ID;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.SERVICE_NAMESPACE;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_VERSION;
+import static java.util.logging.Level.WARNING;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
@@ -22,7 +23,6 @@
import io.opentelemetry.semconv.SchemaUrls;
import java.io.File;
import java.io.IOException;
-import java.util.logging.Level;
import java.util.logging.Logger;
/**
@@ -65,7 +65,7 @@ static Resource buildResource(String configPath) {
parser.nextToken();
if (!parser.isExpectedStartObjectToken()) {
- logger.log(Level.WARNING, "Invalid Beanstalk config: ", configPath);
+ logger.log(WARNING, "Invalid Beanstalk config: ", configPath);
return Resource.create(attrBuilders.build(), SchemaUrls.V1_25_0);
}
@@ -87,7 +87,7 @@ static Resource buildResource(String configPath) {
}
}
} catch (IOException e) {
- logger.log(Level.WARNING, "Could not parse Beanstalk config.", e);
+ logger.log(WARNING, "Could not parse Beanstalk config.", e);
return Resource.empty();
}
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/DockerHelper.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/DockerHelper.java
index 5b8aefeac..a52cf1c18 100644
--- a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/DockerHelper.java
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/DockerHelper.java
@@ -5,11 +5,12 @@
package io.opentelemetry.contrib.aws.resource;
+import static java.util.logging.Level.WARNING;
+
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
-import java.util.logging.Level;
import java.util.logging.Logger;
class DockerHelper {
@@ -44,9 +45,9 @@ public String getContainerId() {
}
}
} catch (FileNotFoundException e) {
- logger.log(Level.WARNING, "Failed to read container id, cgroup file does not exist.");
+ logger.log(WARNING, "Failed to read container id, cgroup file does not exist.");
} catch (IOException e) {
- logger.log(Level.WARNING, "Unable to read container id: " + e.getMessage());
+ logger.log(WARNING, "Unable to read container id: " + e.getMessage());
}
return "";
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/Ec2Resource.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/Ec2Resource.java
index d4bdb4228..5aa930b1c 100644
--- a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/Ec2Resource.java
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/Ec2Resource.java
@@ -15,6 +15,7 @@
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.HOST_IMAGE_ID;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.HOST_NAME;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.HOST_TYPE;
+import static java.util.logging.Level.WARNING;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
@@ -28,7 +29,6 @@
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
/**
@@ -125,7 +125,7 @@ static Resource buildResource(String endpoint) {
}
}
} catch (IOException e) {
- logger.log(Level.WARNING, "Could not parse identity document, resource not filled.", e);
+ logger.log(WARNING, "Could not parse identity document, resource not filled.", e);
return Resource.empty();
}
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EcsResource.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EcsResource.java
index de6d50afe..83440819b 100644
--- a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EcsResource.java
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EcsResource.java
@@ -26,6 +26,9 @@
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.CONTAINER_NAME;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.CloudPlatformIncubatingValues.AWS_ECS;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.CloudProviderIncubatingValues.AWS;
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.singletonList;
+import static java.util.logging.Level.WARNING;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
@@ -35,11 +38,9 @@
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.semconv.SchemaUrls;
import java.io.IOException;
-import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
-import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -88,7 +89,7 @@ static Resource buildResource(Map<String, String> sysEnv, SimpleHttpClient httpClient) {
static void fetchMetadata(
SimpleHttpClient httpClient, String url, AttributesBuilder attrBuilders) {
- String json = httpClient.fetchString("GET", url, Collections.emptyMap(), null);
+ String json = httpClient.fetchString("GET", url, emptyMap(), null);
if (json.isEmpty()) {
return;
}
@@ -103,17 +104,17 @@ static void fetchMetadata(
.getLogGroupArn()
.ifPresent(
logGroupArn -> {
- attrBuilders.put(AWS_LOG_GROUP_ARNS, Collections.singletonList(logGroupArn));
+ attrBuilders.put(AWS_LOG_GROUP_ARNS, singletonList(logGroupArn));
});
logArnBuilder
.getLogStreamArn()
.ifPresent(
logStreamArn -> {
- attrBuilders.put(AWS_LOG_STREAM_ARNS, Collections.singletonList(logStreamArn));
+ attrBuilders.put(AWS_LOG_STREAM_ARNS, singletonList(logStreamArn));
});
} catch (IOException e) {
- logger.log(Level.WARNING, "Can't get ECS metadata", e);
+ logger.log(WARNING, "Can't get ECS metadata", e);
}
}
@@ -156,7 +157,7 @@ static void parseResponse(
JsonParser parser, AttributesBuilder attrBuilders, LogArnBuilder logArnBuilder)
throws IOException {
if (!parser.isExpectedStartObjectToken()) {
- logger.log(Level.WARNING, "Couldn't parse ECS metadata, invalid JSON");
+ logger.log(WARNING, "Couldn't parse ECS metadata, invalid JSON");
return;
}
@@ -314,7 +315,7 @@ private static class DockerImage {
private static final Pattern imagePattern =
Pattern.compile(
- "^(?([^/\\s]+/)?([^:\\s]+))(:(?[^@\\s]+))?(@sha256:(?\\d+))?$");
+ "^(?([^/\\s]+/)?([^:\\s]+))(:(?[^@\\s]+))?(@sha256:(?[\\da-fA-F]+))?$");
final String repository;
final String tag;
@@ -339,7 +340,7 @@ static DockerImage parse(@Nullable String image) {
}
Matcher matcher = imagePattern.matcher(image);
if (!matcher.matches()) {
- logger.log(Level.WARNING, "Couldn't parse image '" + image + "'");
+ logger.log(WARNING, "Couldn't parse image '" + image + "'");
return null;
}
String repository = matcher.group("repository");
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EksResource.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EksResource.java
index 8ed3fb512..156755446 100644
--- a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EksResource.java
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/EksResource.java
@@ -11,6 +11,8 @@
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.CloudPlatformIncubatingValues.AWS_EKS;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.CloudProviderIncubatingValues.AWS;
import static io.opentelemetry.contrib.aws.resource.IncubatingAttributes.K8S_CLUSTER_NAME;
+import static java.util.logging.Level.FINE;
+import static java.util.logging.Level.WARNING;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
@@ -26,7 +28,6 @@
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
/**
@@ -91,7 +92,7 @@ static Resource buildResource(
private static boolean isEks(
String k8sTokenPath, String k8sKeystorePath, SimpleHttpClient httpClient) {
if (!isK8s(k8sTokenPath, k8sKeystorePath)) {
- logger.log(Level.FINE, "Not running on k8s.");
+ logger.log(FINE, "Not running on k8s.");
return false;
}
@@ -145,7 +146,7 @@ private static String getClusterName(SimpleHttpClient httpClient) {
}
}
} catch (IOException e) {
- logger.log(Level.WARNING, "Can't get cluster name on EKS.", e);
+ logger.log(WARNING, "Can't get cluster name on EKS.", e);
}
return "";
}
@@ -156,7 +157,7 @@ private static String getK8sCredHeader() {
new String(Files.readAllBytes(Paths.get(K8S_TOKEN_PATH)), StandardCharsets.UTF_8);
return "Bearer " + content;
} catch (IOException e) {
- logger.log(Level.WARNING, "Unable to load K8s client token.", e);
+ logger.log(WARNING, "Unable to load K8s client token.", e);
}
return "";
}
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/SimpleHttpClient.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/SimpleHttpClient.java
index 12bc6e34e..f78719d99 100644
--- a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/SimpleHttpClient.java
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/SimpleHttpClient.java
@@ -5,6 +5,9 @@
package io.opentelemetry.contrib.aws.resource;
+import static java.util.logging.Level.FINE;
+import static java.util.logging.Level.WARNING;
+
import java.io.FileInputStream;
import java.io.IOException;
import java.security.KeyStore;
@@ -13,7 +16,6 @@
import java.time.Duration;
import java.util.Collection;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import javax.net.ssl.SSLContext;
@@ -72,7 +74,7 @@ public String fetchString(
int responseCode = response.code();
if (responseCode != 200) {
logger.log(
- Level.FINE,
+ FINE,
"Error response from "
+ urlStr
+ " code ("
@@ -84,7 +86,7 @@ public String fetchString(
ResponseBody body = response.body();
return body != null ? body.string() : "";
} catch (IOException e) {
- logger.log(Level.FINE, "SimpleHttpClient fetch string failed.", e);
+ logger.log(FINE, "SimpleHttpClient fetch string failed.", e);
}
return "";
@@ -101,7 +103,7 @@ private static X509TrustManager buildTrustManager(@Nullable KeyStore keyStore) {
tmf.init(keyStore);
return (X509TrustManager) tmf.getTrustManagers()[0];
} catch (Exception e) {
- logger.log(Level.WARNING, "Build SslSocketFactory for K8s restful client exception.", e);
+ logger.log(WARNING, "Build SslSocketFactory for K8s restful client exception.", e);
return null;
}
}
@@ -117,7 +119,7 @@ private static SSLSocketFactory buildSslSocketFactory(@Nullable TrustManager tru
return context.getSocketFactory();
} catch (Exception e) {
- logger.log(Level.WARNING, "Build SslSocketFactory for K8s restful client exception.", e);
+ logger.log(WARNING, "Build SslSocketFactory for K8s restful client exception.", e);
}
return null;
}
@@ -138,7 +140,7 @@ private static KeyStore getKeystoreForTrustedCert(String certPath) {
}
return trustStore;
} catch (Exception e) {
- logger.log(Level.WARNING, "Cannot load KeyStore from " + certPath);
+ logger.log(WARNING, "Cannot load KeyStore from " + certPath);
return null;
}
}
diff --git a/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/internal/AwsResourceDetector.java b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/internal/AwsResourceDetector.java
new file mode 100644
index 000000000..ae4255570
--- /dev/null
+++ b/aws-resources/src/main/java/io/opentelemetry/contrib/aws/resource/internal/AwsResourceDetector.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.aws.resource.internal;
+
+import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
+import io.opentelemetry.contrib.aws.resource.BeanstalkResource;
+import io.opentelemetry.contrib.aws.resource.Ec2Resource;
+import io.opentelemetry.contrib.aws.resource.EcsResource;
+import io.opentelemetry.contrib.aws.resource.EksResource;
+import io.opentelemetry.contrib.aws.resource.LambdaResource;
+import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
+import io.opentelemetry.sdk.resources.Resource;
+import io.opentelemetry.sdk.resources.ResourceBuilder;
+
+public class AwsResourceDetector implements ComponentProvider<Resource> {
+
+ @Override
+ public Class<Resource> getType() {
+ return Resource.class;
+ }
+
+ @Override
+ public String getName() {
+ return "aws";
+ }
+
+ @Override
+ public Resource create(DeclarativeConfigProperties config) {
+ ResourceBuilder builder = Resource.builder();
+ builder.putAll(BeanstalkResource.get());
+ builder.putAll(Ec2Resource.get());
+ builder.putAll(EcsResource.get());
+ builder.putAll(EksResource.get());
+ builder.putAll(LambdaResource.get());
+ return builder.build();
+ }
+}
diff --git a/aws-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/aws-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider
new file mode 100644
index 000000000..ea6d743f4
--- /dev/null
+++ b/aws-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider
@@ -0,0 +1 @@
+io.opentelemetry.contrib.aws.resource.internal.AwsResourceDetector
diff --git a/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/EksResourceTest.java b/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/EksResourceTest.java
index 7eaec5e55..31b05be57 100644
--- a/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/EksResourceTest.java
+++ b/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/EksResourceTest.java
@@ -34,7 +34,7 @@
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
-public class EksResourceTest {
+class EksResourceTest {
@Mock private DockerHelper mockDockerHelper;
diff --git a/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/ResourceComponentProviderTest.java b/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/ResourceComponentProviderTest.java
new file mode 100644
index 000000000..51e21854b
--- /dev/null
+++ b/aws-resources/src/test/java/io/opentelemetry/contrib/aws/resource/ResourceComponentProviderTest.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.aws.resource;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
+import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
+import io.opentelemetry.semconv.incubating.CloudIncubatingAttributes;
+import org.assertj.core.api.InstanceOfAssertFactory;
+import org.junit.jupiter.api.Test;
+
+class ResourceComponentProviderTest {
+ @Test
+ void endToEnd() {
+ assertThat(
+ AutoConfiguredOpenTelemetrySdk.builder()
+ .build()
+ .getOpenTelemetrySdk()
+ .getSdkTracerProvider())
+ .extracting("sharedState")
+ .extracting("resource")
+ .extracting(
+ "attributes",
+ new InstanceOfAssertFactory<>(Attributes.class, OpenTelemetryAssertions::assertThat))
+ .containsEntry(
+ CloudIncubatingAttributes.CLOUD_PROVIDER,
+ CloudIncubatingAttributes.CloudProviderIncubatingValues.AWS);
+ }
+}
diff --git a/aws-resources/src/test/resources/declarative-config.yaml b/aws-resources/src/test/resources/declarative-config.yaml
new file mode 100644
index 000000000..da52af7d7
--- /dev/null
+++ b/aws-resources/src/test/resources/declarative-config.yaml
@@ -0,0 +1,10 @@
+file_format: "1.0-rc.1"
+resource:
+ detection/development:
+ detectors:
+ - aws:
+tracer_provider:
+ processors:
+ - simple:
+ exporter:
+ console:
diff --git a/aws-resources/src/test/resources/ecs-container-metadata-v3.json b/aws-resources/src/test/resources/ecs-container-metadata-v3.json
index 2e89ffe59..f7cc0f53f 100644
--- a/aws-resources/src/test/resources/ecs-container-metadata-v3.json
+++ b/aws-resources/src/test/resources/ecs-container-metadata-v3.json
@@ -2,7 +2,7 @@
"DockerId": "43481a6ce4842eec8fe72fc28500c6b52edcc0917f105b83379f88cac1ff3946",
"Name": "nginx-curl",
"DockerName": "ecs-nginx-5-nginx-curl-ccccb9f49db0dfe0d901",
- "Image": "nrdlngr/nginx-curl",
+ "Image": "nrdlngr/nginx-curl:latest@sha256:8dc35e9386b5d280d285ae7a78d271a5d4a82106cb254fbed5fde4923faa8deb",
"ImageID": "sha256:2e00ae64383cfc865ba0a2ba37f61b50a120d2d9378559dcd458dc0de47bc165",
"Labels": {
"com.amazonaws.ecs.cluster": "default",
@@ -28,4 +28,4 @@
]
}
]
-}
\ No newline at end of file
+}
diff --git a/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayLambdaPropagator.java b/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayLambdaPropagator.java
index a6b6a2ab4..b34bc961c 100644
--- a/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayLambdaPropagator.java
+++ b/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayLambdaPropagator.java
@@ -5,12 +5,13 @@
package io.opentelemetry.contrib.awsxray.propagator;
+import static java.util.Collections.singletonMap;
+
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.propagation.TextMapGetter;
import io.opentelemetry.context.propagation.TextMapPropagator;
import io.opentelemetry.context.propagation.TextMapSetter;
-import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -75,7 +76,7 @@ public <C> Context extract(Context context, @Nullable C carrier, TextMapGetter<C> getter) {
- private static final List<String> FIELDS = Collections.singletonList(TRACE_HEADER_KEY);
+ private static final List<String> FIELDS = singletonList(TRACE_HEADER_KEY);
private static final AwsXrayPropagator INSTANCE = new AwsXrayPropagator();
diff --git a/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayCompositePropagatorTest.java b/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayCompositePropagatorTest.java
index 53e806fdc..99998d974 100644
--- a/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayCompositePropagatorTest.java
+++ b/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayCompositePropagatorTest.java
@@ -15,7 +15,7 @@
import java.util.LinkedHashMap;
import org.junit.jupiter.api.Test;
-public class AwsXrayCompositePropagatorTest extends AwsXrayPropagatorTest {
+class AwsXrayCompositePropagatorTest extends AwsXrayPropagatorTest {
@Override
TextMapPropagator propagator() {
diff --git a/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/internal/AwsComponentProviderTest.java b/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/internal/AwsComponentProviderTest.java
index 1d1590d3a..5bcd62137 100644
--- a/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/internal/AwsComponentProviderTest.java
+++ b/aws-xray-propagator/src/test/java/io/opentelemetry/contrib/awsxray/propagator/internal/AwsComponentProviderTest.java
@@ -20,7 +20,12 @@ class AwsComponentProviderTest {
@Test
void endToEnd() {
- String yaml = "file_format: 0.3\n" + "propagator:\n" + " composite: [xray, xray-lambda]\n";
+ String yaml =
+ "file_format: 1.0-rc.1\n"
+ + "propagator:\n"
+ + " composite:\n"
+ + " - xray:\n"
+ + " - xray-lambda:\n";
OpenTelemetrySdk openTelemetrySdk =
DeclarativeConfiguration.parseAndCreate(
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsAttributeKeys.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsAttributeKeys.java
index 101641f08..c9e762d63 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsAttributeKeys.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsAttributeKeys.java
@@ -5,6 +5,8 @@
package io.opentelemetry.contrib.awsxray;
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
+
import io.opentelemetry.api.common.AttributeKey;
/** Utility class holding attribute keys with special meaning to AWS components */
@@ -12,28 +14,25 @@ final class AwsAttributeKeys {
private AwsAttributeKeys() {}
- static final AttributeKey<String> AWS_SPAN_KIND = AttributeKey.stringKey("aws.span.kind");
+ static final AttributeKey<String> AWS_SPAN_KIND = stringKey("aws.span.kind");
- static final AttributeKey<String> AWS_LOCAL_SERVICE = AttributeKey.stringKey("aws.local.service");
+ static final AttributeKey<String> AWS_LOCAL_SERVICE = stringKey("aws.local.service");
- static final AttributeKey<String> AWS_LOCAL_OPERATION =
- AttributeKey.stringKey("aws.local.operation");
+ static final AttributeKey<String> AWS_LOCAL_OPERATION = stringKey("aws.local.operation");
- static final AttributeKey<String> AWS_REMOTE_SERVICE =
- AttributeKey.stringKey("aws.remote.service");
+ static final AttributeKey<String> AWS_REMOTE_SERVICE = stringKey("aws.remote.service");
- static final AttributeKey<String> AWS_REMOTE_OPERATION =
- AttributeKey.stringKey("aws.remote.operation");
+ static final AttributeKey<String> AWS_REMOTE_OPERATION = stringKey("aws.remote.operation");
- static final AttributeKey<String> AWS_REMOTE_TARGET = AttributeKey.stringKey("aws.remote.target");
+ static final AttributeKey<String> AWS_REMOTE_TARGET = stringKey("aws.remote.target");
// use the same AWS Resource attribute name defined by OTel java auto-instr for aws_sdk_v_1_1
// TODO: all AWS specific attributes should be defined in semconv package and reused cross all
// otel packages. Related sim -
// https://github.com/open-telemetry/opentelemetry-java-instrumentation/issues/8710
- static final AttributeKey<String> AWS_BUCKET_NAME = AttributeKey.stringKey("aws.bucket.name");
- static final AttributeKey<String> AWS_QUEUE_NAME = AttributeKey.stringKey("aws.queue.name");
- static final AttributeKey<String> AWS_STREAM_NAME = AttributeKey.stringKey("aws.stream.name");
- static final AttributeKey<String> AWS_TABLE_NAME = AttributeKey.stringKey("aws.table.name");
+ static final AttributeKey<String> AWS_BUCKET_NAME = stringKey("aws.bucket.name");
+ static final AttributeKey<String> AWS_QUEUE_NAME = stringKey("aws.queue.name");
+ static final AttributeKey<String> AWS_STREAM_NAME = stringKey("aws.stream.name");
+ static final AttributeKey<String> AWS_TABLE_NAME = stringKey("aws.table.name");
}
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGenerator.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGenerator.java
index 42275b4b1..3fed3b3f3 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGenerator.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGenerator.java
@@ -5,6 +5,8 @@
package io.opentelemetry.contrib.awsxray;
+import static io.opentelemetry.api.common.AttributeKey.longKey;
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.contrib.awsxray.AwsAttributeKeys.AWS_BUCKET_NAME;
import static io.opentelemetry.contrib.awsxray.AwsAttributeKeys.AWS_LOCAL_OPERATION;
import static io.opentelemetry.contrib.awsxray.AwsAttributeKeys.AWS_LOCAL_SERVICE;
@@ -16,6 +18,7 @@
import static io.opentelemetry.contrib.awsxray.AwsAttributeKeys.AWS_STREAM_NAME;
import static io.opentelemetry.contrib.awsxray.AwsAttributeKeys.AWS_TABLE_NAME;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
+import static java.util.logging.Level.FINEST;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
@@ -27,7 +30,6 @@
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Optional;
-import java.util.logging.Level;
import java.util.logging.Logger;
/**
@@ -54,36 +56,31 @@ final class AwsMetricAttributeGenerator implements MetricAttributeGenerator {
private static final String UNKNOWN_REMOTE_OPERATION = "UnknownRemoteOperation";
// copied from DbIncubatingAttributes
- private static final AttributeKey<String> DB_OPERATION = AttributeKey.stringKey("db.operation");
- private static final AttributeKey<String> DB_SYSTEM = AttributeKey.stringKey("db.system");
+ private static final AttributeKey<String> DB_OPERATION = stringKey("db.operation");
+ private static final AttributeKey<String> DB_SYSTEM = stringKey("db.system");
// copied from FaasIncubatingAttributes
- private static final AttributeKey<String> FAAS_INVOKED_NAME =
- AttributeKey.stringKey("faas.invoked_name");
- private static final AttributeKey<String> FAAS_TRIGGER = AttributeKey.stringKey("faas.trigger");
+ private static final AttributeKey<String> FAAS_INVOKED_NAME = stringKey("faas.invoked_name");
+ private static final AttributeKey<String> FAAS_TRIGGER = stringKey("faas.trigger");
// copied from GraphqlIncubatingAttributes
private static final AttributeKey<String> GRAPHQL_OPERATION_TYPE =
- AttributeKey.stringKey("graphql.operation.type");
+ stringKey("graphql.operation.type");
// copied from HttpIncubatingAttributes
- private static final AttributeKey<String> HTTP_METHOD = AttributeKey.stringKey("http.method");
- private static final AttributeKey<String> HTTP_TARGET = AttributeKey.stringKey("http.target");
- private static final AttributeKey<String> HTTP_URL = AttributeKey.stringKey("http.url");
+ private static final AttributeKey<String> HTTP_METHOD = stringKey("http.method");
+ private static final AttributeKey<String> HTTP_TARGET = stringKey("http.target");
+ private static final AttributeKey<String> HTTP_URL = stringKey("http.url");
// copied from MessagingIncubatingAttributes
- private static final AttributeKey<String> MESSAGING_OPERATION =
- AttributeKey.stringKey("messaging.operation");
- private static final AttributeKey<String> MESSAGING_SYSTEM =
- AttributeKey.stringKey("messaging.system");
+ private static final AttributeKey<String> MESSAGING_OPERATION = stringKey("messaging.operation");
+ private static final AttributeKey<String> MESSAGING_SYSTEM = stringKey("messaging.system");
// copied from NetIncubatingAttributes
- private static final AttributeKey<String> NET_PEER_NAME = AttributeKey.stringKey("net.peer.name");
- private static final AttributeKey<Long> NET_PEER_PORT = AttributeKey.longKey("net.peer.port");
- private static final AttributeKey<String> NET_SOCK_PEER_ADDR =
- AttributeKey.stringKey("net.sock.peer.addr");
- private static final AttributeKey<Long> NET_SOCK_PEER_PORT =
- AttributeKey.longKey("net.sock.peer.port");
+ private static final AttributeKey<String> NET_PEER_NAME = stringKey("net.peer.name");
+ private static final AttributeKey<Long> NET_PEER_PORT = longKey("net.peer.port");
+ private static final AttributeKey<String> NET_SOCK_PEER_ADDR = stringKey("net.sock.peer.addr");
+ private static final AttributeKey<Long> NET_SOCK_PEER_PORT = longKey("net.sock.peer.port");
// copied from PeerIncubatingAttributes
- private static final AttributeKey<String> PEER_SERVICE = AttributeKey.stringKey("peer.service");
+ private static final AttributeKey<String> PEER_SERVICE = stringKey("peer.service");
// copied from RpcIncubatingAttributes
- private static final AttributeKey<String> RPC_METHOD = AttributeKey.stringKey("rpc.method");
- private static final AttributeKey<String> RPC_SERVICE = AttributeKey.stringKey("rpc.service");
+ private static final AttributeKey<String> RPC_METHOD = stringKey("rpc.method");
+ private static final AttributeKey<String> RPC_SERVICE = stringKey("rpc.service");
@Override
public Attributes generateMetricAttributesFromSpan(SpanData span, Resource resource) {
@@ -307,7 +304,7 @@ private static String generateRemoteOperation(SpanData span) {
remoteOperation = extractApiPathValue(url.getPath());
}
} catch (MalformedURLException e) {
- logger.log(Level.FINEST, "invalid http.url attribute: ", httpUrl);
+ logger.log(FINEST, "invalid http.url attribute: ", httpUrl);
}
}
if (isKeyPresent(span, HTTP_METHOD)) {
@@ -387,6 +384,6 @@ private static void logUnknownAttribute(AttributeKey<String> attributeKey, SpanData span) {
String[] params = {
attributeKey.getKey(), span.getKind().name(), span.getSpanContext().getSpanId()
};
- logger.log(Level.FINEST, "No valid {0} value found for {1} span {2}", params);
+ logger.log(FINEST, "No valid {0} value found for {1} span {2}", params);
}
}
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessor.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessor.java
index 244138a47..69ca18476 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessor.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessor.java
@@ -5,6 +5,8 @@
package io.opentelemetry.contrib.awsxray;
+import static io.opentelemetry.api.common.AttributeKey.longKey;
+
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.DoubleHistogram;
@@ -40,8 +42,7 @@
@Immutable
public final class AwsSpanMetricsProcessor implements SpanProcessor {
- private static final AttributeKey<Long> HTTP_STATUS_CODE =
- AttributeKey.longKey("http.status_code");
+ private static final AttributeKey<Long> HTTP_STATUS_CODE = longKey("http.status_code");
private static final double NANOS_TO_MILLIS = 1_000_000.0;
@@ -152,16 +153,16 @@ private static Long getAwsStatusCode(SpanData spanData) {
Throwable throwable = exceptionEvent.getException();
try {
- Method method = throwable.getClass().getMethod("getStatusCode", new Class>[] {});
- Object code = method.invoke(throwable, new Object[] {});
+ Method method = throwable.getClass().getMethod("getStatusCode");
+ Object code = method.invoke(throwable);
return Long.valueOf((Integer) code);
} catch (Exception e) {
// Take no action
}
try {
- Method method = throwable.getClass().getMethod("statusCode", new Class>[] {});
- Object code = method.invoke(throwable, new Object[] {});
+ Method method = throwable.getClass().getMethod("statusCode");
+ Object code = method.invoke(throwable);
return Long.valueOf((Integer) code);
} catch (Exception e) {
// Take no action
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java
index ad9b72a2c..9c997f042 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java
@@ -5,6 +5,14 @@
package io.opentelemetry.contrib.awsxray;
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static java.util.function.Function.identity;
+import static java.util.logging.Level.FINE;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+import static java.util.stream.Collectors.toSet;
+
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.context.Context;
@@ -29,17 +37,13 @@
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import java.util.stream.Collectors;
import javax.annotation.Nullable;
/** Remote sampler that gets sampling configuration from AWS X-Ray. */
public final class AwsXrayRemoteSampler implements Sampler, Closeable {
- static final long DEFAULT_TARGET_INTERVAL_NANOS = TimeUnit.SECONDS.toNanos(10);
+ static final long DEFAULT_TARGET_INTERVAL_NANOS = SECONDS.toNanos(10);
private static final Logger logger = Logger.getLogger(AwsXrayRemoteSampler.class.getName());
@@ -134,7 +138,7 @@ private void getAndUpdateSampler() {
initialSampler,
response.getSamplingRules().stream()
.map(SamplingRuleRecord::getRule)
- .collect(Collectors.toList())));
+ .collect(toList())));
previousRulesResponse = response;
ScheduledFuture<?> existingFetchTargetsFuture = fetchTargetsFuture;
@@ -142,18 +146,17 @@ private void getAndUpdateSampler() {
existingFetchTargetsFuture.cancel(false);
}
fetchTargetsFuture =
- executor.schedule(
- this::fetchTargets, DEFAULT_TARGET_INTERVAL_NANOS, TimeUnit.NANOSECONDS);
+ executor.schedule(this::fetchTargets, DEFAULT_TARGET_INTERVAL_NANOS, NANOSECONDS);
}
} catch (Throwable t) {
- logger.log(Level.FINE, "Failed to update sampler", t);
+ logger.log(FINE, "Failed to update sampler", t);
}
scheduleSamplerUpdate();
}
private void scheduleSamplerUpdate() {
long delay = pollingIntervalNanos + jitterNanos.next();
- pollFuture = executor.schedule(this::getAndUpdateSampler, delay, TimeUnit.NANOSECONDS);
+ pollFuture = executor.schedule(this::getAndUpdateSampler, delay, NANOSECONDS);
}
/**
@@ -168,7 +171,7 @@ Duration getNextSamplerUpdateScheduledDuration() {
if (pollFuture == null) {
return null;
}
- return Duration.ofNanos(pollFuture.getDelay(TimeUnit.NANOSECONDS));
+ return Duration.ofNanos(pollFuture.getDelay(NANOSECONDS));
}
private void fetchTargets() {
@@ -181,28 +184,25 @@ private void fetchTargets() {
Date now = Date.from(Instant.ofEpochSecond(0, clock.now()));
List<SamplingStatisticsDocument> statistics = xrayRulesSampler.snapshot(now);
Set<String> requestedTargetRuleNames =
- statistics.stream()
- .map(SamplingStatisticsDocument::getRuleName)
- .collect(Collectors.toSet());
+ statistics.stream().map(SamplingStatisticsDocument::getRuleName).collect(toSet());
GetSamplingTargetsResponse response =
client.getSamplingTargets(GetSamplingTargetsRequest.create(statistics));
Map<String, SamplingTargetDocument> targets =
response.getDocuments().stream()
- .collect(Collectors.toMap(SamplingTargetDocument::getRuleName, Function.identity()));
+ .collect(toMap(SamplingTargetDocument::getRuleName, identity()));
updateInternalSamplers(xrayRulesSampler.withTargets(targets, requestedTargetRuleNames, now));
} catch (Throwable t) {
// Might be a transient API failure, try again after a default interval.
fetchTargetsFuture =
- executor.schedule(
- this::fetchTargets, DEFAULT_TARGET_INTERVAL_NANOS, TimeUnit.NANOSECONDS);
+ executor.schedule(this::fetchTargets, DEFAULT_TARGET_INTERVAL_NANOS, NANOSECONDS);
return;
}
long nextTargetFetchIntervalNanos =
xrayRulesSampler.nextTargetFetchTimeNanos() - clock.nanoTime();
fetchTargetsFuture =
- executor.schedule(this::fetchTargets, nextTargetFetchIntervalNanos, TimeUnit.NANOSECONDS);
+ executor.schedule(this::fetchTargets, nextTargetFetchIntervalNanos, NANOSECONDS);
}
@Override
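
For reference, here is a minimal sketch of wiring the remote sampler above into a tracer provider, using only the builder methods touched in this diff; the newBuilder factory name and the localhost:2000 endpoint are assumptions drawn from the module's public API, not part of this change.

import io.opentelemetry.contrib.awsxray.AwsXrayRemoteSampler;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import java.time.Duration;

class AwsXrayRemoteSamplerUsageSketch {
  public static void main(String[] args) throws Exception {
    Resource resource = Resource.getDefault();
    // Polls sampling rules from the local X-Ray daemon / CloudWatch agent endpoint (assumed default).
    AwsXrayRemoteSampler sampler =
        AwsXrayRemoteSampler.newBuilder(resource)
            .setEndpoint("http://localhost:2000")
            .setPollingInterval(Duration.ofMinutes(5))
            .build();
    SdkTracerProvider tracerProvider =
        SdkTracerProvider.builder().setResource(resource).setSampler(sampler).build();
    // ... create tracers and spans, then release both on shutdown.
    tracerProvider.close();
    sampler.close();
  }
}
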
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerBuilder.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerBuilder.java
index 1ce0d41c1..25485e4b0 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerBuilder.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerBuilder.java
@@ -6,6 +6,8 @@
package io.opentelemetry.contrib.awsxray;
import static java.util.Objects.requireNonNull;
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import io.opentelemetry.sdk.common.Clock;
@@ -26,7 +28,7 @@ public final class AwsXrayRemoteSamplerBuilder {
private Clock clock = Clock.getDefault();
private String endpoint = DEFAULT_ENDPOINT;
@Nullable private Sampler initialSampler;
- private long pollingIntervalNanos = TimeUnit.SECONDS.toNanos(DEFAULT_POLLING_INTERVAL_SECS);
+ private long pollingIntervalNanos = SECONDS.toNanos(DEFAULT_POLLING_INTERVAL_SECS);
AwsXrayRemoteSamplerBuilder(Resource resource) {
this.resource = resource;
@@ -51,7 +53,7 @@ public AwsXrayRemoteSamplerBuilder setEndpoint(String endpoint) {
@CanIgnoreReturnValue
public AwsXrayRemoteSamplerBuilder setPollingInterval(Duration delay) {
requireNonNull(delay, "delay");
- return setPollingInterval(delay.toNanos(), TimeUnit.NANOSECONDS);
+ return setPollingInterval(delay.toNanos(), NANOSECONDS);
}
/**
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java
index 6387aa0d7..ae4cac018 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java
@@ -5,7 +5,10 @@
package io.opentelemetry.contrib.awsxray;
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static java.util.stream.Collectors.toMap;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
@@ -19,28 +22,27 @@
import io.opentelemetry.sdk.trace.samplers.Sampler;
import io.opentelemetry.sdk.trace.samplers.SamplingDecision;
import io.opentelemetry.sdk.trace.samplers.SamplingResult;
+import io.opentelemetry.semconv.HttpAttributes;
+import io.opentelemetry.semconv.ServerAttributes;
+import io.opentelemetry.semconv.UrlAttributes;
import java.time.Duration;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
import java.util.regex.Pattern;
-import java.util.stream.Collectors;
import javax.annotation.Nullable;
final class SamplingRuleApplier {
// copied from AwsIncubatingAttributes
private static final AttributeKey<String> AWS_ECS_CONTAINER_ARN =
- AttributeKey.stringKey("aws.ecs.container.arn");
+ stringKey("aws.ecs.container.arn");
// copied from CloudIncubatingAttributes
- private static final AttributeKey<String> CLOUD_PLATFORM =
- AttributeKey.stringKey("cloud.platform");
- private static final AttributeKey<String> CLOUD_RESOURCE_ID =
- AttributeKey.stringKey("cloud.resource_id");
+ private static final AttributeKey<String> CLOUD_PLATFORM = stringKey("cloud.platform");
+ private static final AttributeKey<String> CLOUD_RESOURCE_ID = stringKey("cloud.resource_id");
// copied from CloudIncubatingAttributes.CloudPlatformIncubatingValues
public static final String AWS_EC2 = "aws_ec2";
public static final String AWS_ECS = "aws_ecs";
@@ -48,15 +50,19 @@ final class SamplingRuleApplier {
public static final String AWS_LAMBDA = "aws_lambda";
public static final String AWS_ELASTIC_BEANSTALK = "aws_elastic_beanstalk";
// copied from HttpIncubatingAttributes
- private static final AttributeKey<String> HTTP_HOST = AttributeKey.stringKey("http.host");
- private static final AttributeKey<String> HTTP_METHOD = AttributeKey.stringKey("http.method");
- private static final AttributeKey<String> HTTP_TARGET = AttributeKey.stringKey("http.target");
- private static final AttributeKey<String> HTTP_URL = AttributeKey.stringKey("http.url");
+ private static final AttributeKey<String> HTTP_HOST = stringKey("http.host");
+ private static final AttributeKey<String> HTTP_METHOD = stringKey("http.method");
+ private static final AttributeKey<String> HTTP_TARGET = stringKey("http.target");
+ private static final AttributeKey<String> HTTP_URL = stringKey("http.url");
// copied from NetIncubatingAttributes
- private static final AttributeKey<String> NET_HOST_NAME = AttributeKey.stringKey("net.host.name");
+ private static final AttributeKey<String> NET_HOST_NAME = stringKey("net.host.name");
private static final Map<String, String> XRAY_CLOUD_PLATFORM;
+ // _OTHER request method:
+ // https://github.com/open-telemetry/semantic-conventions/blob/main/docs/registry/attributes/http.md?plain=1#L96
+ private static final String _OTHER_REQUEST_METHOD = "_OTHER";
+
static {
Map<String, String> xrayCloudPlatform = new HashMap<>();
xrayCloudPlatform.put(AWS_EC2, "AWS::EC2::Instance");
@@ -124,7 +130,7 @@ final class SamplingRuleApplier {
} else {
attributeMatchers =
rule.getAttributes().entrySet().stream()
- .collect(Collectors.toMap(Map.Entry::getKey, e -> toMatcher(e.getValue())));
+ .collect(toMap(Map.Entry::getKey, e -> toMatcher(e.getValue())));
}
urlPathMatcher = toMatcher(rule.getUrlPath());
@@ -175,25 +181,35 @@ private SamplingRuleApplier(
@SuppressWarnings("deprecation") // TODO
boolean matches(Attributes attributes, Resource resource) {
int matchedAttributes = 0;
- String httpTarget = null;
- String httpUrl = null;
- String httpMethod = null;
- String host = null;
- for (Map.Entry<AttributeKey<?>, Object> entry : attributes.asMap().entrySet()) {
- if (entry.getKey().equals(HTTP_TARGET)) {
- httpTarget = (String) entry.getValue();
- } else if (entry.getKey().equals(HTTP_URL)) {
- httpUrl = (String) entry.getValue();
- } else if (entry.getKey().equals(HTTP_METHOD)) {
- httpMethod = (String) entry.getValue();
- } else if (entry.getKey().equals(NET_HOST_NAME)) {
- host = (String) entry.getValue();
- } else if (entry.getKey().equals(HTTP_HOST)) {
- // TODO (trask) remove support for deprecated http.host attribute
- host = (String) entry.getValue();
+ String httpTarget = attributes.get(UrlAttributes.URL_PATH);
+ if (httpTarget == null) {
+ httpTarget = attributes.get(HTTP_TARGET);
+ }
+
+ String httpUrl = attributes.get(UrlAttributes.URL_FULL);
+ if (httpUrl == null) {
+ httpUrl = attributes.get(HTTP_URL);
+ }
+
+ String httpMethod = attributes.get(HttpAttributes.HTTP_REQUEST_METHOD);
+ if (httpMethod == null) {
+ httpMethod = attributes.get(HTTP_METHOD);
+ }
+
+ if (httpMethod != null && httpMethod.equals(_OTHER_REQUEST_METHOD)) {
+ httpMethod = attributes.get(HttpAttributes.HTTP_REQUEST_METHOD_ORIGINAL);
+ }
+
+ String host = attributes.get(ServerAttributes.SERVER_ADDRESS);
+ if (host == null) {
+ host = attributes.get(NET_HOST_NAME);
+ if (host == null) {
+ host = attributes.get(HTTP_HOST);
}
+ }
+ for (Map.Entry<AttributeKey<?>, Object> entry : attributes.asMap().entrySet()) {
Matcher matcher = attributeMatchers.get(entry.getKey().getKey());
if (matcher == null) {
continue;
@@ -300,7 +316,7 @@ SamplingRuleApplier withTarget(SamplingTargetDocument target, Date now) {
}
long intervalNanos =
target.getIntervalSecs() != null
- ? TimeUnit.SECONDS.toNanos(target.getIntervalSecs())
+ ? SECONDS.toNanos(target.getIntervalSecs())
: AwsXrayRemoteSampler.DEFAULT_TARGET_INTERVAL_NANOS;
long newNextSnapshotTimeNanos = clock.nanoTime() + intervalNanos;
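
The matching change above prefers the stable semantic-convention attributes (url.path, url.full, http.request.method, server.address) and only falls back to the deprecated http.*/net.* keys when the stable ones are absent. A compact, hypothetical helper illustrating that fallback pattern (for clarity only, not code from this change):

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import javax.annotation.Nullable;

final class SemconvFallbackSketch {
  private SemconvFallbackSketch() {}

  // Returns the stable attribute when present, otherwise the legacy one; null if neither is set.
  @Nullable
  static String stableOrLegacy(
      Attributes attributes, AttributeKey<String> stable, AttributeKey<String> legacy) {
    String value = attributes.get(stable);
    return value != null ? value : attributes.get(legacy);
  }
}
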
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java
index 75977dc0f..e187da972 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java
@@ -5,6 +5,9 @@
package io.opentelemetry.contrib.awsxray;
+import static java.util.logging.Level.FINE;
+import static java.util.stream.Collectors.toList;
+
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.context.Context;
@@ -21,9 +24,7 @@
import java.util.Map;
import java.util.Objects;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import java.util.stream.Collectors;
final class XrayRulesSampler implements Sampler {
@@ -84,7 +85,7 @@ public SamplingResult shouldSample(
// In practice, X-Ray always returns a Default rule that matches all requests so it is a bug in
// our code or X-Ray to reach here, fallback just in case.
logger.log(
- Level.FINE,
+ FINE,
"No sampling rule matched the request. "
+ "This is a bug in either the OpenTelemetry SDK or X-Ray.");
return fallbackSampler.shouldSample(
@@ -100,7 +101,7 @@ List<SamplingStatisticsDocument> snapshot(Date now) {
return Arrays.stream(ruleAppliers)
.map(rule -> rule.snapshot(now))
.filter(Objects::nonNull)
- .collect(Collectors.toList());
+ .collect(toList());
}
long nextTargetFetchTimeNanos() {
diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XraySamplerClient.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XraySamplerClient.java
index 5dbbbbbbf..84dbd0144 100644
--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XraySamplerClient.java
+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XraySamplerClient.java
@@ -25,6 +25,8 @@
package io.opentelemetry.contrib.awsxray;
+import static java.util.logging.Level.FINE;
+
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -37,7 +39,6 @@
import java.io.UncheckedIOException;
import java.math.BigDecimal;
import java.util.Date;
-import java.util.logging.Level;
import java.util.logging.Logger;
import okhttp3.Call;
import okhttp3.MediaType;
@@ -51,7 +52,7 @@ final class XraySamplerClient {
private static final ObjectMapper OBJECT_MAPPER =
new ObjectMapper()
- .setSerializationInclusion(JsonInclude.Include.NON_EMPTY)
+ .setDefaultPropertyInclusion(JsonInclude.Include.NON_EMPTY)
// AWS APIs return timestamps as floats.
.registerModule(
new SimpleModule().addDeserializer(Date.class, new FloatDateDeserializer()))
@@ -114,7 +115,7 @@ private <T> T executeJsonRequest(String endpoint, Object request, Class<T> responseType) {
private static String readResponse(Response response, String endpoint) throws IOException {
if (!response.isSuccessful()) {
logger.log(
- Level.FINE,
+ FINE,
"Error response from "
+ endpoint
+ " code ("
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AttributePropagatingSpanProcessorTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AttributePropagatingSpanProcessorTest.java
index b4f40e408..0bf394e0e 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AttributePropagatingSpanProcessorTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AttributePropagatingSpanProcessorTest.java
@@ -41,7 +41,7 @@ public void setup() {
}
@Test
- public void testAttributesPropagation() {
+ void testAttributesPropagation() {
Span spanWithAppOnly = tracer.spanBuilder("parent").startSpan();
spanWithAppOnly.setAttribute(testKey1, "testValue1");
validateSpanAttributesInheritance(spanWithAppOnly, null, "testValue1", null);
@@ -57,7 +57,7 @@ public void testAttributesPropagation() {
}
@Test
- public void testOverrideAttributes() {
+ void testOverrideAttributes() {
Span parentSpan = tracer.spanBuilder("parent").startSpan();
parentSpan.setAttribute(testKey1, "testValue1");
parentSpan.setAttribute(testKey2, "testValue2");
@@ -75,13 +75,13 @@ public void testOverrideAttributes() {
}
@Test
- public void testAttributesDoNotExist() {
+ void testAttributesDoNotExist() {
Span span = tracer.spanBuilder("parent").startSpan();
validateSpanAttributesInheritance(span, null, null, null);
}
@Test
- public void testSpanNamePropagationBySpanKind() {
+ void testSpanNamePropagationBySpanKind() {
for (SpanKind value : SpanKind.values()) {
Span span = tracer.spanBuilder("parent").setSpanKind(value).startSpan();
if (value == SpanKind.SERVER || value == SpanKind.CONSUMER) {
@@ -93,7 +93,7 @@ public void testSpanNamePropagationBySpanKind() {
}
@Test
- public void testSpanNamePropagationWithRemoteParentSpan() {
+ void testSpanNamePropagationWithRemoteParentSpan() {
Span remoteParent =
Span.wrap(
SpanContext.createFromRemoteParent(
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGeneratorTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGeneratorTest.java
index 135a1eeff..4d38e89ff 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGeneratorTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributeGeneratorTest.java
@@ -79,7 +79,7 @@ public void setUpMocks() {
}
@Test
- public void testConsumerSpanWithoutAttributes() {
+ void testConsumerSpanWithoutAttributes() {
Attributes expectedAttributes =
Attributes.of(
AWS_SPAN_KIND, SpanKind.CONSUMER.name(),
@@ -89,7 +89,7 @@ public void testConsumerSpanWithoutAttributes() {
}
@Test
- public void testServerSpanWithoutAttributes() {
+ void testServerSpanWithoutAttributes() {
Attributes expectedAttributes =
Attributes.of(
AWS_SPAN_KIND, SpanKind.SERVER.name(),
@@ -99,7 +99,7 @@ public void testServerSpanWithoutAttributes() {
}
@Test
- public void testProducerSpanWithoutAttributes() {
+ void testProducerSpanWithoutAttributes() {
Attributes expectedAttributes =
Attributes.of(
AWS_SPAN_KIND, SpanKind.PRODUCER.name(),
@@ -111,7 +111,7 @@ public void testProducerSpanWithoutAttributes() {
}
@Test
- public void testClientSpanWithoutAttributes() {
+ void testClientSpanWithoutAttributes() {
Attributes expectedAttributes =
Attributes.of(
AWS_SPAN_KIND, SpanKind.CLIENT.name(),
@@ -123,13 +123,13 @@ public void testClientSpanWithoutAttributes() {
}
@Test
- public void testInternalSpan() {
+ void testInternalSpan() {
// Spans with internal span kind should not produce any attributes.
validateAttributesProducedForSpanOfKind(Attributes.empty(), SpanKind.INTERNAL);
}
@Test
- public void testConsumerSpanWithAttributes() {
+ void testConsumerSpanWithAttributes() {
updateResourceWithServiceName();
when(spanDataMock.getName()).thenReturn(SPAN_NAME_VALUE);
@@ -142,7 +142,7 @@ public void testConsumerSpanWithAttributes() {
}
@Test
- public void testServerSpanWithAttributes() {
+ void testServerSpanWithAttributes() {
updateResourceWithServiceName();
when(spanDataMock.getName()).thenReturn(SPAN_NAME_VALUE);
@@ -155,7 +155,7 @@ public void testServerSpanWithAttributes() {
}
@Test
- public void testServerSpanWithNullSpanName() {
+ void testServerSpanWithNullSpanName() {
updateResourceWithServiceName();
when(spanDataMock.getName()).thenReturn(null);
@@ -168,7 +168,7 @@ public void testServerSpanWithNullSpanName() {
}
@Test
- public void testServerSpanWithSpanNameAsHttpMethod() {
+ void testServerSpanWithSpanNameAsHttpMethod() {
updateResourceWithServiceName();
when(spanDataMock.getName()).thenReturn("GET");
mockAttribute(HTTP_METHOD, "GET");
@@ -183,7 +183,7 @@ public void testServerSpanWithSpanNameAsHttpMethod() {
}
@Test
- public void testServerSpanWithSpanNameWithHttpTarget() {
+ void testServerSpanWithSpanNameWithHttpTarget() {
updateResourceWithServiceName();
when(spanDataMock.getName()).thenReturn("POST");
mockAttribute(HTTP_METHOD, "POST");
@@ -203,7 +203,7 @@ public void testServerSpanWithSpanNameWithHttpTarget() {
}
@Test
- public void testProducerSpanWithAttributes() {
+ void testProducerSpanWithAttributes() {
updateResourceWithServiceName();
mockAttribute(AWS_LOCAL_OPERATION, AWS_LOCAL_OPERATION_VALUE);
mockAttribute(AWS_REMOTE_SERVICE, AWS_REMOTE_SERVICE_VALUE);
@@ -220,7 +220,7 @@ public void testProducerSpanWithAttributes() {
}
@Test
- public void testClientSpanWithAttributes() {
+ void testClientSpanWithAttributes() {
updateResourceWithServiceName();
mockAttribute(AWS_LOCAL_OPERATION, AWS_LOCAL_OPERATION_VALUE);
mockAttribute(AWS_REMOTE_SERVICE, AWS_REMOTE_SERVICE_VALUE);
@@ -237,7 +237,7 @@ public void testClientSpanWithAttributes() {
}
@Test
- public void testRemoteAttributesCombinations() {
+ void testRemoteAttributesCombinations() {
// Set all expected fields to a test string, we will overwrite them in descending order to test
// the priority-order logic in AwsMetricAttributeGenerator remote attribute methods.
mockAttribute(AWS_REMOTE_SERVICE, "TestString");
@@ -333,7 +333,7 @@ public void testRemoteAttributesCombinations() {
}
@Test
- public void testPeerServiceDoesOverrideOtherRemoteServices() {
+ void testPeerServiceDoesOverrideOtherRemoteServices() {
validatePeerServiceDoesOverride(RPC_SERVICE);
validatePeerServiceDoesOverride(DB_SYSTEM);
validatePeerServiceDoesOverride(FAAS_INVOKED_PROVIDER);
@@ -346,7 +346,7 @@ public void testPeerServiceDoesOverrideOtherRemoteServices() {
}
@Test
- public void testPeerServiceDoesNotOverrideAwsRemoteService() {
+ void testPeerServiceDoesNotOverrideAwsRemoteService() {
mockAttribute(AWS_REMOTE_SERVICE, "TestString");
mockAttribute(PEER_SERVICE, "PeerService");
@@ -357,7 +357,7 @@ public void testPeerServiceDoesNotOverrideAwsRemoteService() {
}
@Test
- public void testClientSpanWithRemoteTargetAttributes() {
+ void testClientSpanWithRemoteTargetAttributes() {
// Validate behaviour of aws bucket name attribute, then remove it.
mockAttribute(AWS_BUCKET_NAME, "aws_s3_bucket_name");
validateRemoteTargetAttributes(AWS_REMOTE_TARGET, "aws_s3_bucket_name");
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributesSpanExporterTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributesSpanExporterTest.java
index aedf5fa06..8502734d7 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributesSpanExporterTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsMetricAttributesSpanExporterTest.java
@@ -62,7 +62,7 @@ public void setUpMocks() {
}
@Test
- public void testPassthroughDelegations() {
+ void testPassthroughDelegations() {
awsMetricAttributesSpanExporter.flush();
awsMetricAttributesSpanExporter.shutdown();
awsMetricAttributesSpanExporter.close();
@@ -72,7 +72,7 @@ public void testPassthroughDelegations() {
}
@Test
- public void testExportDelegationWithoutAttributeOrModification() {
+ void testExportDelegationWithoutAttributeOrModification() {
Attributes spanAttributes = buildSpanAttributes(CONTAINS_NO_ATTRIBUTES);
SpanData spanDataMock = buildSpanDataMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_NO_ATTRIBUTES);
@@ -88,7 +88,7 @@ public void testExportDelegationWithoutAttributeOrModification() {
}
@Test
- public void testExportDelegationWithAttributeButWithoutModification() {
+ void testExportDelegationWithAttributeButWithoutModification() {
Attributes spanAttributes = buildSpanAttributes(CONTAINS_ATTRIBUTES);
SpanData spanDataMock = buildSpanDataMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_NO_ATTRIBUTES);
@@ -104,7 +104,7 @@ public void testExportDelegationWithAttributeButWithoutModification() {
}
@Test
- public void testExportDelegationWithoutAttributeButWithModification() {
+ void testExportDelegationWithoutAttributeButWithModification() {
Attributes spanAttributes = buildSpanAttributes(CONTAINS_NO_ATTRIBUTES);
SpanData spanDataMock = buildSpanDataMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_ATTRIBUTES);
@@ -124,7 +124,7 @@ public void testExportDelegationWithoutAttributeButWithModification() {
}
@Test
- public void testExportDelegationWithAttributeAndModification() {
+ void testExportDelegationWithAttributeAndModification() {
Attributes spanAttributes = buildSpanAttributes(CONTAINS_ATTRIBUTES);
SpanData spanDataMock = buildSpanDataMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_ATTRIBUTES);
@@ -146,7 +146,7 @@ public void testExportDelegationWithAttributeAndModification() {
}
@Test
- public void testExportDelegationWithMultipleSpans() {
+ void testExportDelegationWithMultipleSpans() {
Attributes spanAttributes1 = buildSpanAttributes(CONTAINS_NO_ATTRIBUTES);
SpanData spanDataMock1 = buildSpanDataMock(spanAttributes1);
Attributes metricAttributes1 = buildMetricAttributes(CONTAINS_NO_ATTRIBUTES);
@@ -185,7 +185,7 @@ public void testExportDelegationWithMultipleSpans() {
}
@Test
- public void testOverridenAttributes() {
+ void testOverridenAttributes() {
Attributes spanAttributes =
Attributes.of(
AttributeKey.stringKey("key1"),
@@ -217,7 +217,7 @@ public void testOverridenAttributes() {
}
@Test
- public void testExportDelegatingSpanDataBehaviour() {
+ void testExportDelegatingSpanDataBehaviour() {
Attributes spanAttributes = buildSpanAttributes(CONTAINS_ATTRIBUTES);
SpanData spanDataMock = buildSpanDataMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_ATTRIBUTES);
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessorTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessorTest.java
index 0836f5a8e..5d10a6a3d 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessorTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsSpanMetricsProcessorTest.java
@@ -101,13 +101,13 @@ public void setUpMocks() {
}
@Test
- public void testIsRequired() {
+ void testIsRequired() {
assertThat(awsSpanMetricsProcessor.isStartRequired()).isFalse();
assertThat(awsSpanMetricsProcessor.isEndRequired()).isTrue();
}
@Test
- public void testStartDoesNothingToSpan() {
+ void testStartDoesNothingToSpan() {
Context parentContextMock = mock(Context.class);
ReadWriteSpan spanMock = mock(ReadWriteSpan.class);
awsSpanMetricsProcessor.onStart(parentContextMock, spanMock);
@@ -115,7 +115,7 @@ public void testStartDoesNothingToSpan() {
}
@Test
- public void testTearDown() {
+ void testTearDown() {
assertThat(awsSpanMetricsProcessor.shutdown()).isEqualTo(CompletableResultCode.ofSuccess());
assertThat(awsSpanMetricsProcessor.forceFlush()).isEqualTo(CompletableResultCode.ofSuccess());
@@ -128,7 +128,7 @@ public void testTearDown() {
* AwsSpanMetricsProcessor's onEnd method pertaining to metrics generation.
*/
@Test
- public void testOnEndMetricsGenerationWithoutSpanAttributes() {
+ void testOnEndMetricsGenerationWithoutSpanAttributes() {
Attributes spanAttributes = buildSpanAttributes(CONTAINS_NO_ATTRIBUTES);
ReadableSpan readableSpanMock = buildReadableSpanMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_ATTRIBUTES);
@@ -141,7 +141,7 @@ public void testOnEndMetricsGenerationWithoutSpanAttributes() {
}
@Test
- public void testOnEndMetricsGenerationWithoutMetricAttributes() {
+ void testOnEndMetricsGenerationWithoutMetricAttributes() {
Attributes spanAttributes = Attributes.of(HTTP_STATUS_CODE, 500L);
ReadableSpan readableSpanMock = buildReadableSpanMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_NO_ATTRIBUTES);
@@ -154,7 +154,7 @@ public void testOnEndMetricsGenerationWithoutMetricAttributes() {
}
@Test
- public void testOnEndMetricsGenerationWithoutEndRequired() {
+ void testOnEndMetricsGenerationWithoutEndRequired() {
Attributes spanAttributes = Attributes.of(HTTP_STATUS_CODE, 500L);
ReadableSpan readableSpanMock = buildReadableSpanMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_ATTRIBUTES);
@@ -167,7 +167,7 @@ public void testOnEndMetricsGenerationWithoutEndRequired() {
}
@Test
- public void testOnEndMetricsGenerationWithLatency() {
+ void testOnEndMetricsGenerationWithLatency() {
Attributes spanAttributes = Attributes.of(HTTP_STATUS_CODE, 200L);
ReadableSpan readableSpanMock = buildReadableSpanMock(spanAttributes);
Attributes metricAttributes = buildMetricAttributes(CONTAINS_ATTRIBUTES);
@@ -182,7 +182,7 @@ public void testOnEndMetricsGenerationWithLatency() {
}
@Test
- public void testOnEndMetricsGenerationWithAwsStatusCodes() {
+ void testOnEndMetricsGenerationWithAwsStatusCodes() {
validateMetricsGeneratedForAwsStatusCode(399L, ExpectedStatusMetric.NEITHER);
validateMetricsGeneratedForAwsStatusCode(400L, ExpectedStatusMetric.ERROR);
validateMetricsGeneratedForAwsStatusCode(499L, ExpectedStatusMetric.ERROR);
@@ -192,7 +192,7 @@ public void testOnEndMetricsGenerationWithAwsStatusCodes() {
}
@Test
- public void testOnEndMetricsGenerationWithStatusCodes() {
+ void testOnEndMetricsGenerationWithStatusCodes() {
// Invalid HTTP status codes
validateMetricsGeneratedForHttpStatusCode(null, ExpectedStatusMetric.NEITHER);
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java
index 4e5cd13bc..d45e00ad2 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java
@@ -196,7 +196,6 @@ void testJitterTruncation() {
.setEndpoint(server.httpUri().toString())
.setPollingInterval(Duration.ofMinutes(5))
.build()) {
- assertThat(samplerWithLongerPollingInterval.getNextSamplerUpdateScheduledDuration()).isNull();
await()
.untilAsserted(
() -> {
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/ResourceHolderTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/ResourceHolderTest.java
index 1140abc5f..f81f3d370 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/ResourceHolderTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/ResourceHolderTest.java
@@ -21,11 +21,11 @@
* Unit tests for {@link ResourceHolder}. Note that there isn't a great way to test the "default"
* fallback logic, as when the test suite is run, the customize logic appears to be invoked.
*/
-public class ResourceHolderTest {
+class ResourceHolderTest {
@Test
@SuppressWarnings("unchecked")
- public void testCustomized() {
+ void testCustomized() {
Resource customizedResource = Resource.create(Attributes.empty());
AutoConfigurationCustomizer mockCustomizer = mock(AutoConfigurationCustomizer.class);
ResourceHolder resourceHolder = new ResourceHolder();
diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java
index bc7bdd3e7..920a5ffd4 100644
--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java
+++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java
@@ -29,6 +29,9 @@
import io.opentelemetry.sdk.testing.time.TestClock;
import io.opentelemetry.sdk.trace.samplers.SamplingDecision;
import io.opentelemetry.sdk.trace.samplers.SamplingResult;
+import io.opentelemetry.semconv.HttpAttributes;
+import io.opentelemetry.semconv.ServerAttributes;
+import io.opentelemetry.semconv.UrlAttributes;
import io.opentelemetry.semconv.incubating.CloudIncubatingAttributes;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -72,6 +75,15 @@ class ExactMatch {
.put(AttributeKey.longKey("speed"), 10)
.build();
+ private final Attributes stableSemConvAttributes =
+ Attributes.builder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "GET")
+ .put(ServerAttributes.SERVER_ADDRESS, "opentelemetry.io")
+ .put(UrlAttributes.URL_PATH, "/instrument-me")
+ .put(AttributeKey.stringKey("animal"), "cat")
+ .put(AttributeKey.longKey("speed"), 10)
+ .build();
+
// FixedRate set to 1.0 in rule and no reservoir
@Test
void fixedRateAlwaysSample() {
@@ -120,6 +132,21 @@ void matches() {
.isTrue();
}
+ @Test
+ void matchesURLFullStableSemConv() {
+ assertThat(applier.matches(stableSemConvAttributes, resource)).isTrue();
+
+ // url.full works too
+ assertThat(
+ applier.matches(
+ attributes.toBuilder()
+ .remove(HTTP_TARGET)
+ .put(UrlAttributes.URL_FULL, "scheme://host:port/instrument-me")
+ .build(),
+ resource))
+ .isTrue();
+ }
+
@Test
void serviceNameNotMatch() {
assertThat(
@@ -140,6 +167,15 @@ void methodNotMatch() {
assertThat(applier.matches(attributes, resource)).isFalse();
}
+ @Test
+ void methodStableSemConvNotMatch() {
+ Attributes attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "POST")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ }
+
@Test
void hostNotMatch() {
// Replacing dot with character makes sure we're not accidentally treating dot as regex
@@ -177,6 +213,36 @@ void pathNotMatch() {
assertThat(applier.matches(attributes, resource)).isFalse();
}
+ @Test
+ void pathStableSemConvNotMatch() {
+ Attributes attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(UrlAttributes.URL_PATH, "/instrument-you")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .remove(UrlAttributes.URL_PATH)
+ .put(UrlAttributes.URL_FULL, "scheme://host:port/instrument-you")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .remove(UrlAttributes.URL_PATH)
+ .put(UrlAttributes.URL_FULL, "scheme://host:port")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+
+ // Correct path, but we ignore anyways since the URL is malformed per spec, scheme is always
+ // present.
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .remove(UrlAttributes.URL_PATH)
+ .put(UrlAttributes.URL_FULL, "host:port/instrument-me")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ }
+
@Test
void attributeNotMatch() {
Attributes attributes =
@@ -235,6 +301,15 @@ class WildcardMatch {
.put(AttributeKey.longKey("speed"), 10)
.build();
+ private final Attributes stableSemConvAttributes =
+ Attributes.builder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "GET")
+ .put(ServerAttributes.SERVER_ADDRESS, "opentelemetry.io")
+ .put(UrlAttributes.URL_PATH, "/instrument-me?foo=bar&cat=meow")
+ .put(AttributeKey.stringKey("animal"), "cat")
+ .put(AttributeKey.longKey("speed"), 10)
+ .build();
+
// FixedRate set to 0.0 in rule and no reservoir
@Test
void fixedRateNeverSample() {
@@ -317,6 +392,36 @@ void methodNotMatch() {
assertThat(applier.matches(attributes, resource)).isFalse();
}
+ @Test
+ void stableSemConvMethodMatches() {
+ Attributes attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "BADGETGOOD")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "BADGET")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "GETGET")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ }
+
+ @Test
+ void stableSemConvMethodNotMatch() {
+ Attributes attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(HttpAttributes.HTTP_REQUEST_METHOD, "POST")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes = removeAttribute(stableSemConvAttributes, HttpAttributes.HTTP_REQUEST_METHOD);
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ }
+
@Test
void hostMatches() {
Attributes attributes =
@@ -345,6 +450,56 @@ void hostNotMatch() {
assertThat(applier.matches(attributes, resource)).isFalse();
}
+ @Test
+ void stableSemConvHostMatches() {
+ Attributes attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "alpha.opentelemetry.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "opfdnqtelemetry.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "opentglemetry.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "opentglemry.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "opentglemrz.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ }
+
+ @Test
+ void stableSemConvHostNotMatch() {
+ Attributes attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "opentelemetryfio")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "opentgalemetry.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes =
+ this.stableSemConvAttributes.toBuilder()
+ .put(ServerAttributes.SERVER_ADDRESS, "alpha.oentelemetry.io")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes = removeAttribute(this.stableSemConvAttributes, ServerAttributes.SERVER_ADDRESS);
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ }
+
@Test
void pathMatches() {
Attributes attributes =
@@ -368,6 +523,37 @@ void pathNotMatch() {
assertThat(applier.matches(attributes, resource)).isFalse();
}
+ @Test
+ void pathStableSemConvMatches() {
+ Attributes attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(UrlAttributes.URL_PATH, "/instrument-me?foo=bar&cat=")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ // Deceptive question mark, it's actually a wildcard :-)
+ attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(UrlAttributes.URL_PATH, "/instrument-meafoo=bar&cat=")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isTrue();
+ }
+
+ @Test
+ void pathStableSemConvNotMatch() {
+ Attributes attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(UrlAttributes.URL_PATH, "/instrument-mea?foo=bar&cat=")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes =
+ stableSemConvAttributes.toBuilder()
+ .put(UrlAttributes.URL_PATH, "foo/instrument-meafoo=bar&cat=")
+ .build();
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ attributes = removeAttribute(stableSemConvAttributes, UrlAttributes.URL_PATH);
+ assertThat(applier.matches(attributes, resource)).isFalse();
+ }
+
@Test
void attributeMatches() {
Attributes attributes =
diff --git a/azure-resources/build.gradle.kts b/azure-resources/build.gradle.kts
index 33af12d84..05c032d70 100644
--- a/azure-resources/build.gradle.kts
+++ b/azure-resources/build.gradle.kts
@@ -5,8 +5,8 @@ plugins {
id("maven-publish")
}
-description = "OpenTelemetry GCP Resources Support"
-otelJava.moduleName.set("io.opentelemetry.contrib.gcp.resource")
+description = "OpenTelemetry Azure Resources Support"
+otelJava.moduleName.set("io.opentelemetry.contrib.azure.resource")
// enable publishing to maven local
java {
@@ -15,6 +15,7 @@ java {
dependencies {
api("io.opentelemetry:opentelemetry-api")
+ compileOnly("io.opentelemetry:opentelemetry-api-incubator")
api("io.opentelemetry:opentelemetry-sdk")
implementation("io.opentelemetry.semconv:opentelemetry-semconv")
@@ -26,12 +27,24 @@ dependencies {
testImplementation("io.opentelemetry.semconv:opentelemetry-semconv-incubating")
testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure")
+ testImplementation("io.opentelemetry:opentelemetry-api-incubator")
testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
+ testImplementation("io.opentelemetry:opentelemetry-exporter-logging")
+ testImplementation("io.opentelemetry:opentelemetry-sdk-extension-incubator")
-// testImplementation("org.mockito:mockito-core")
testImplementation("com.google.guava:guava")
testImplementation("org.junit.jupiter:junit-jupiter-api")
testImplementation("org.assertj:assertj-core")
testImplementation("com.linecorp.armeria:armeria-junit5")
}
+
+tasks {
+ withType<Test>().configureEach {
+ environment(
+ "WEBSITE_SITE_NAME" to "my-function",
+ "FUNCTIONS_EXTENSION_VERSION" to "1.2.3"
+ )
+ jvmArgs("-Dotel.experimental.config.file=${project.projectDir.resolve("src/test/resources/declarative-config.yaml")}")
+ }
+}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAksResourceProvider.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAksResourceProvider.java
index 4d3b92280..987492dd8 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAksResourceProvider.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAksResourceProvider.java
@@ -8,14 +8,13 @@
import static io.opentelemetry.contrib.azure.resource.IncubatingAttributes.CloudPlatformIncubatingValues.AZURE_AKS;
import static io.opentelemetry.contrib.azure.resource.IncubatingAttributes.K8S_CLUSTER_NAME;
-import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.resources.Resource;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;
-public class AzureAksResourceProvider extends CloudResourceProvider {
+public final class AzureAksResourceProvider extends CloudResourceProvider {
private static final Map COMPUTE_MAPPING = new HashMap<>();
@@ -56,12 +55,12 @@ public AzureAksResourceProvider() {
@Override
public int order() {
// run after the fast cloud resource providers that only check environment variables
- // and before the AKS provider
+ // and before the VM provider
return 100;
}
@Override
- public Resource createResource(ConfigProperties configProperties) {
+ public Resource createResource() {
if (environment.get(KUBERNETES_SERVICE_HOST) == null) {
return Resource.empty();
}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProvider.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProvider.java
index 71cf699e6..3a658428a 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProvider.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProvider.java
@@ -12,19 +12,18 @@
import static io.opentelemetry.contrib.azure.resource.IncubatingAttributes.HOST_ID;
import static io.opentelemetry.contrib.azure.resource.IncubatingAttributes.SERVICE_INSTANCE_ID;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
+import static java.util.Objects.requireNonNull;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
import io.opentelemetry.api.internal.StringUtils;
-import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.resources.Resource;
import java.util.HashMap;
import java.util.Map;
-import java.util.Objects;
import javax.annotation.Nullable;
-public class AzureAppServiceResourceProvider extends CloudResourceProvider {
+public final class AzureAppServiceResourceProvider extends CloudResourceProvider {
static final AttributeKey<String> AZURE_APP_SERVICE_STAMP_RESOURCE_ATTRIBUTE =
AttributeKey.stringKey("azure.app.service.stamp");
@@ -60,7 +59,7 @@ public AzureAppServiceResourceProvider() {
}
@Override
- public Resource createResource(ConfigProperties config) {
+ public Resource createResource() {
return Resource.create(getAttributes());
}
@@ -69,7 +68,7 @@ public Attributes getAttributes() {
if (detect != AzureEnvVarPlatform.APP_SERVICE) {
return Attributes.empty();
}
- String name = Objects.requireNonNull(env.get(WEBSITE_SITE_NAME));
+ String name = requireNonNull(env.get(WEBSITE_SITE_NAME));
AttributesBuilder builder = AzureVmResourceProvider.azureAttributeBuilder(AZURE_APP_SERVICE);
builder.put(SERVICE_NAME, name);
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProvider.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProvider.java
index 2f641148f..014ec5b41 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProvider.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProvider.java
@@ -12,12 +12,11 @@
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
-import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.resources.Resource;
import java.util.HashMap;
import java.util.Map;
-public class AzureContainersResourceProvider extends CloudResourceProvider {
+public final class AzureContainersResourceProvider extends CloudResourceProvider {
static final String CONTAINER_APP_NAME = "CONTAINER_APP_NAME";
@@ -45,7 +44,7 @@ public AzureContainersResourceProvider() {
}
@Override
- public Resource createResource(ConfigProperties config) {
+ public Resource createResource() {
return Resource.create(getAttributes());
}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProvider.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProvider.java
index 1b86c6212..d98a41be4 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProvider.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProvider.java
@@ -15,12 +15,11 @@
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
-import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.resources.Resource;
import java.util.HashMap;
import java.util.Map;
-public class AzureFunctionsResourceProvider extends CloudResourceProvider {
+public final class AzureFunctionsResourceProvider extends CloudResourceProvider {
static final String FUNCTIONS_VERSION = "FUNCTIONS_EXTENSION_VERSION";
private static final String FUNCTIONS_MEM_LIMIT = "WEBSITE_MEMORY_LIMIT_MB";
@@ -47,7 +46,7 @@ public AzureFunctionsResourceProvider() {
}
@Override
- public Resource createResource(ConfigProperties config) {
+ public Resource createResource() {
return Resource.create(getAttributes());
}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureMetadataService.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureMetadataService.java
index d5bf44520..a93413a24 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureMetadataService.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureMetadataService.java
@@ -5,12 +5,13 @@
package io.opentelemetry.contrib.azure.resource;
+import static java.util.Objects.requireNonNull;
+
import com.fasterxml.jackson.core.JsonFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.time.Duration;
-import java.util.Objects;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.logging.Level;
@@ -19,9 +20,11 @@
import okhttp3.Request;
import okhttp3.Response;
-public class AzureMetadataService {
+public final class AzureMetadataService {
static final JsonFactory JSON_FACTORY = new JsonFactory();
private static final URL METADATA_URL;
+ private static final Duration TIMEOUT = Duration.ofSeconds(5);
+ private static final Logger logger = Logger.getLogger(AzureMetadataService.class.getName());
static {
try {
@@ -31,12 +34,6 @@ public class AzureMetadataService {
}
}
- private AzureMetadataService() {}
-
- private static final Duration TIMEOUT = Duration.ofSeconds(5);
-
- private static final Logger logger = Logger.getLogger(AzureMetadataService.class.getName());
-
static Supplier<Optional<String>> defaultClient() {
return () -> fetchMetadata(METADATA_URL);
}
@@ -66,10 +63,12 @@ static Optional<String> fetchMetadata(URL url) {
return Optional.empty();
}
- return Optional.of(Objects.requireNonNull(response.body()).string());
+ return Optional.of(requireNonNull(response.body()).string());
} catch (IOException e) {
logger.log(Level.FINE, "Failed to fetch Azure VM metadata", e);
return Optional.empty();
}
}
+
+ private AzureMetadataService() {}
}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureResourceDetector.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureResourceDetector.java
new file mode 100644
index 000000000..27da91c4c
--- /dev/null
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureResourceDetector.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.azure.resource;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
+import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
+import io.opentelemetry.sdk.resources.Resource;
+import io.opentelemetry.sdk.resources.ResourceBuilder;
+
+public final class AzureResourceDetector implements ComponentProvider<Resource> {
+
+ @Override
+ public Class<Resource> getType() {
+ return Resource.class;
+ }
+
+ @Override
+ public String getName() {
+ return "azure";
+ }
+
+ @Override
+ public Resource create(DeclarativeConfigProperties config) {
+ Builder builder = new Builder();
+ builder.add(new AzureFunctionsResourceProvider());
+ builder.add(new AzureAppServiceResourceProvider());
+ builder.add(new AzureContainersResourceProvider());
+ builder.addIfEmpty(new AzureAksResourceProvider());
+ builder.addIfEmpty(new AzureVmResourceProvider());
+ return builder.builder.build();
+ }
+
+ private static class Builder {
+ final ResourceBuilder builder = Resource.builder();
+ int attributesCount = 0;
+
+ private void add(CloudResourceProvider provider) {
+ Attributes attributes = provider.createResource().getAttributes();
+ builder.putAll(attributes);
+ attributesCount += attributes.size();
+ }
+
+ private void addIfEmpty(CloudResourceProvider provider) {
+ if (attributesCount == 0) {
+ add(provider);
+ }
+ }
+ }
+}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureVmResourceProvider.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureVmResourceProvider.java
index 139b808d4..2a87a0488 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureVmResourceProvider.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/AzureVmResourceProvider.java
@@ -22,7 +22,6 @@
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
-import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.resources.Resource;
import java.io.IOException;
import java.util.HashMap;
@@ -35,21 +34,7 @@
import java.util.logging.Logger;
import org.jetbrains.annotations.NotNull;
-public class AzureVmResourceProvider extends CloudResourceProvider {
-
- static class Entry {
- final AttributeKey<String> key;
- final Function<String, String> transform;
-
- Entry(AttributeKey<String> key) {
- this(key, Function.identity());
- }
-
- Entry(AttributeKey<String> key, Function<String, String> transform) {
- this.key = key;
- this.transform = transform;
- }
- }
+public final class AzureVmResourceProvider extends CloudResourceProvider {
private static final Map<String, Entry> COMPUTE_MAPPING = new HashMap<>();
@@ -88,7 +73,7 @@ public int order() {
}
@Override
- public Resource createResource(ConfigProperties config) {
+ public Resource createResource() {
return client
.get()
.map(body -> parseMetadata(body, COMPUTE_MAPPING, AZURE_VM))
@@ -162,4 +147,18 @@ private static void consumeJson(JsonParser parser, BiConsumer co
consumer.accept(parser.currentName(), parser.nextTextValue());
}
}
+
+ static class Entry {
+ final AttributeKey<String> key;
+ final Function<String, String> transform;
+
+ Entry(AttributeKey<String> key) {
+ this(key, Function.identity());
+ }
+
+ Entry(AttributeKey<String> key, Function<String, String> transform) {
+ this.key = key;
+ this.transform = transform;
+ }
+ }
}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/CloudResourceProvider.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/CloudResourceProvider.java
index 181a22889..3c7fcc862 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/CloudResourceProvider.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/CloudResourceProvider.java
@@ -17,4 +17,12 @@ public abstract class CloudResourceProvider implements ConditionalResourceProvid
public final boolean shouldApply(ConfigProperties config, Resource existing) {
return existing.getAttribute(CLOUD_PROVIDER) == null;
}
+
+ @Override
+ public final Resource createResource(ConfigProperties config) {
+ // not using config in any providers
+ return createResource();
+ }
+
+ abstract Resource createResource();
}
diff --git a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/IncubatingAttributes.java b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/IncubatingAttributes.java
index 491cb99a8..524ca8727 100644
--- a/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/IncubatingAttributes.java
+++ b/azure-resources/src/main/java/io/opentelemetry/contrib/azure/resource/IncubatingAttributes.java
@@ -24,10 +24,10 @@ final class IncubatingAttributes {
AttributeKey.stringKey("cloud.resource_id");
public static final class CloudPlatformIncubatingValues {
- public static final String AZURE_VM = "azure_vm";
- public static final String AZURE_AKS = "azure_aks";
- public static final String AZURE_FUNCTIONS = "azure_functions";
- public static final String AZURE_APP_SERVICE = "azure_app_service";
+ public static final String AZURE_VM = "azure.vm";
+ public static final String AZURE_AKS = "azure.aks";
+ public static final String AZURE_FUNCTIONS = "azure.functions";
+ public static final String AZURE_APP_SERVICE = "azure.app_service";
private CloudPlatformIncubatingValues() {}
}
diff --git a/azure-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/azure-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider
new file mode 100644
index 000000000..373780ff0
--- /dev/null
+++ b/azure-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider
@@ -0,0 +1 @@
+io.opentelemetry.contrib.azure.resource.AzureResourceDetector
diff --git a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProviderTest.java b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProviderTest.java
index 20d856cba..90a7d27b5 100644
--- a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProviderTest.java
+++ b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureAppServiceResourceProviderTest.java
@@ -5,6 +5,7 @@
package io.opentelemetry.contrib.azure.resource;
+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PLATFORM;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PROVIDER;
@@ -16,7 +17,6 @@
import com.google.common.collect.ImmutableMap;
import io.opentelemetry.sdk.testing.assertj.AttributesAssert;
-import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
import java.util.HashMap;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
@@ -48,7 +48,7 @@ void defaultValues() {
createResource(DEFAULT_ENV_VARS)
.containsEntry(SERVICE_NAME, TEST_WEBSITE_SITE_NAME)
.containsEntry(CLOUD_PROVIDER, "azure")
- .containsEntry(CLOUD_PLATFORM, "azure_app_service")
+ .containsEntry(CLOUD_PLATFORM, "azure.app_service")
.containsEntry(
CLOUD_RESOURCE_ID,
"/subscriptions/TEST_WEBSITE_OWNER_NAME/resourceGroups/TEST_WEBSITE_RESOURCE_GROUP/providers/Microsoft.Web/sites/TEST_WEBSITE_SITE_NAME")
@@ -98,7 +98,7 @@ void isFunction() {
@NotNull
private static AttributesAssert createResource(Map<String, String> map) {
- return OpenTelemetryAssertions.assertThat(
+ return assertThat(
new AzureAppServiceResourceProvider(map).createResource(null).getAttributes());
}
}
diff --git a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProviderTest.java b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProviderTest.java
index 5ac1a4be7..082ceaf6a 100644
--- a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProviderTest.java
+++ b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureContainersResourceProviderTest.java
@@ -5,6 +5,7 @@
package io.opentelemetry.contrib.azure.resource;
+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME;
import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_VERSION;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PLATFORM;
@@ -13,7 +14,6 @@
import com.google.common.collect.ImmutableMap;
import io.opentelemetry.sdk.testing.assertj.AttributesAssert;
-import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
import java.util.HashMap;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
@@ -50,7 +50,7 @@ void isNotContainer() {
@NotNull
private static AttributesAssert createResource(Map<String, String> map) {
- return OpenTelemetryAssertions.assertThat(
+ return assertThat(
new AzureContainersResourceProvider(map).createResource(null).getAttributes());
}
}
diff --git a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProviderTest.java b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProviderTest.java
index 520e44543..b680088e8 100644
--- a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProviderTest.java
+++ b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/AzureFunctionsResourceProviderTest.java
@@ -5,6 +5,7 @@
package io.opentelemetry.contrib.azure.resource;
+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PLATFORM;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PROVIDER;
import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INSTANCE;
@@ -14,7 +15,6 @@
import com.google.common.collect.ImmutableMap;
import io.opentelemetry.sdk.testing.assertj.AttributesAssert;
-import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
import java.util.HashMap;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
@@ -38,7 +38,7 @@ class AzureFunctionsResourceProviderTest {
void defaultValues() {
createResource(DEFAULT_ENV_VARS)
.containsEntry(CLOUD_PROVIDER, "azure")
- .containsEntry(CLOUD_PLATFORM, "azure_functions")
+ .containsEntry(CLOUD_PLATFORM, "azure.functions")
.containsEntry(FAAS_NAME, TEST_WEBSITE_SITE_NAME)
.containsEntry(FAAS_VERSION, TEST_FUNCTION_VERSION)
.containsEntry(FAAS_INSTANCE, TEST_WEBSITE_INSTANCE_ID)
@@ -55,7 +55,6 @@ void isNotFunction() {
@NotNull
private static AttributesAssert createResource(Map<String, String> map) {
- return OpenTelemetryAssertions.assertThat(
- new AzureFunctionsResourceProvider(map).createResource(null).getAttributes());
+ return assertThat(new AzureFunctionsResourceProvider(map).createResource(null).getAttributes());
}
}
diff --git a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/MetadataBasedResourceProviderTest.java b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/MetadataBasedResourceProviderTest.java
index d827e8fd4..3d65c1e0a 100644
--- a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/MetadataBasedResourceProviderTest.java
+++ b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/MetadataBasedResourceProviderTest.java
@@ -5,6 +5,7 @@
package io.opentelemetry.contrib.azure.resource;
+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PLATFORM;
import static io.opentelemetry.semconv.incubating.CloudIncubatingAttributes.CLOUD_PROVIDER;
@@ -16,7 +17,6 @@
import io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.AttributesAssert;
-import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
@@ -28,7 +28,7 @@
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
-public abstract class MetadataBasedResourceProviderTest {
+abstract class MetadataBasedResourceProviderTest {
@RegisterExtension
public static final MockWebServerExtension server = new MockWebServerExtension();
@@ -47,7 +47,7 @@ private AttributesAssert mockServerResponse() {
@NotNull
private AttributesAssert createResource(Supplier<Optional<String>> client) {
Resource resource = getResourceProvider(client).createResource(null);
- return OpenTelemetryAssertions.assertThat(resource.getAttributes());
+ return assertThat(resource.getAttributes());
}
@NotNull
@@ -79,30 +79,30 @@ protected static String okResponse() {
}
@Test
- public void successFromFile() {
+ void successFromFile() {
assertDefaultAttributes(createResource(() -> Optional.of(okResponse())));
}
@Test
- public void successFromMockServer() {
+ void successFromMockServer() {
server.enqueue(HttpResponse.of(MediaType.JSON, okResponse()));
assertDefaultAttributes(mockServerResponse());
}
@Test
- public void responseNotFound() {
+ void responseNotFound() {
server.enqueue(HttpResponse.of(HttpStatus.NOT_FOUND));
mockServerResponse().isEmpty();
}
@Test
- public void responseEmpty() {
+ void responseEmpty() {
server.enqueue(HttpResponse.of(""));
assertOnlyProvider(mockServerResponse());
}
@Test
- public void responseEmptyJson() {
+ void responseEmptyJson() {
server.enqueue(HttpResponse.of("{}"));
assertOnlyProvider(mockServerResponse());
}
diff --git a/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/ResourceComponentProviderTest.java b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/ResourceComponentProviderTest.java
new file mode 100644
index 000000000..0f67f3919
--- /dev/null
+++ b/azure-resources/src/test/java/io/opentelemetry/contrib/azure/resource/ResourceComponentProviderTest.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.azure.resource;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
+import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
+import io.opentelemetry.semconv.incubating.CloudIncubatingAttributes;
+import org.assertj.core.api.InstanceOfAssertFactory;
+import org.junit.jupiter.api.Test;
+
+class ResourceComponentProviderTest {
+
+ @Test
+ void endToEnd() {
+ assertThat(
+ AutoConfiguredOpenTelemetrySdk.builder()
+ .build()
+ .getOpenTelemetrySdk()
+ .getSdkTracerProvider())
+ .extracting("sharedState")
+ .extracting("resource")
+ .extracting(
+ "attributes",
+ new InstanceOfAssertFactory<>(Attributes.class, OpenTelemetryAssertions::assertThat))
+ .containsEntry(
+ CloudIncubatingAttributes.CLOUD_PROVIDER,
+ CloudIncubatingAttributes.CloudProviderIncubatingValues.AZURE);
+ }
+}
diff --git a/azure-resources/src/test/resources/declarative-config.yaml b/azure-resources/src/test/resources/declarative-config.yaml
new file mode 100644
index 000000000..748dbddd9
--- /dev/null
+++ b/azure-resources/src/test/resources/declarative-config.yaml
@@ -0,0 +1,10 @@
+file_format: "1.0-rc.1"
+resource:
+ detection/development:
+ detectors:
+ - azure:
+tracer_provider:
+ processors:
+ - simple:
+ exporter:
+ console:
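A detector registered under the name `azure` is resolved when the SDK is assembled from a declarative configuration file like the one above. As a minimal sketch of loading such a file from application code, assuming the incubator's `DeclarativeConfiguration` entry point and an illustrative file path:

```java
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

class DeclarativeConfigExample {
  public static void main(String[] args) throws IOException {
    // Illustrative path; point this at a file such as the declarative-config.yaml above.
    try (InputStream config = Files.newInputStream(Paths.get("declarative-config.yaml"))) {
      // Parses the YAML and assembles the SDK; registered resource detectors
      // (here the one named "azure") contribute their attributes to the resource.
      OpenTelemetrySdk sdk = DeclarativeConfiguration.parseAndCreate(config);
      System.out.println(sdk);
    }
  }
}
```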
diff --git a/baggage-processor/README.md b/baggage-processor/README.md
index 44719770b..10e98910d 100644
--- a/baggage-processor/README.md
+++ b/baggage-processor/README.md
@@ -25,6 +25,36 @@ processors through configuration.
| `otel.java.experimental.span-attributes.copy-from-baggage.include` | Add baggage entries as span attributes, e.g. `key1,key2` or `*` to add all baggage items as keys. |
| `otel.java.experimental.log-attributes.copy-from-baggage.include` | Add baggage entries as log attributes, e.g. `key1,key2` or `*` to add all baggage items as keys. |
+### Usage with declarative configuration
+
+You can configure the baggage span and log record processors using declarative YAML configuration with the OpenTelemetry SDK.
+
+For the tracer provider (span processor):
+
+```yaml
+file_format: 1.0-rc.1
+tracer_provider:
+ processors:
+ - baggage:
+ included: [foo]
+ excluded: [bar]
+```
+
+For the logger provider (log record processor):
+
+```yaml
+file_format: 1.0-rc.1
+logger_provider:
+ processors:
+ - baggage:
+ included: [foo]
+ excluded: [bar]
+```
+
+This will configure the respective processor to include baggage keys listed in `included` and
+exclude those in `excluded` as explained in
+[Properties which pattern matching](https://github.com/open-telemetry/opentelemetry-configuration/blob/main/CONTRIBUTING.md#properties-which-pattern-matching).
+
### Usage through programmatic activation
Add the span and log processor when configuring the tracer and logger providers.
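A minimal sketch of that wiring, with an illustrative key filter standing in for the include/exclude lists (the processor classes come from this module; the filter and provider setup are placeholders):

```java
import io.opentelemetry.contrib.baggage.processor.BaggageLogRecordProcessor;
import io.opentelemetry.contrib.baggage.processor.BaggageSpanProcessor;
import io.opentelemetry.sdk.logs.SdkLoggerProvider;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import java.util.function.Predicate;

class BaggageProcessorSetup {

  static SdkTracerProvider tracerProvider() {
    // Illustrative filter; plays the role of the included/excluded lists shown above.
    Predicate<String> keyFilter = key -> key.equals("foo");
    return SdkTracerProvider.builder()
        .addSpanProcessor(new BaggageSpanProcessor(keyFilter))
        .build();
  }

  static SdkLoggerProvider loggerProvider() {
    // Copy every baggage entry onto emitted log records.
    return SdkLoggerProvider.builder()
        .addLogRecordProcessor(BaggageLogRecordProcessor.allowAllBaggageKeys())
        .build();
  }
}
```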
diff --git a/baggage-processor/build.gradle.kts b/baggage-processor/build.gradle.kts
index 017158399..0ff9dd52a 100644
--- a/baggage-processor/build.gradle.kts
+++ b/baggage-processor/build.gradle.kts
@@ -8,11 +8,19 @@ description = "OpenTelemetry Baggage Span Processor"
otelJava.moduleName.set("io.opentelemetry.contrib.baggage.processor")
dependencies {
+ annotationProcessor("com.google.auto.service:auto-service")
+ compileOnly("com.google.auto.service:auto-service-annotations")
api("io.opentelemetry:opentelemetry-api")
api("io.opentelemetry:opentelemetry-sdk")
implementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi")
+ compileOnly("io.opentelemetry:opentelemetry-sdk-common")
+ compileOnly("io.opentelemetry:opentelemetry-sdk-extension-incubator")
+ testAnnotationProcessor("com.google.auto.service:auto-service")
+ testCompileOnly("com.google.auto.service:auto-service-annotations")
+ testImplementation("io.opentelemetry:opentelemetry-sdk-common")
testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure")
+ testImplementation("io.opentelemetry:opentelemetry-sdk-extension-incubator")
testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
testImplementation("org.mockito:mockito-inline")
testImplementation("com.google.guava:guava")
diff --git a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordComponentProvider.java b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordComponentProvider.java
new file mode 100644
index 000000000..be40ab97c
--- /dev/null
+++ b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordComponentProvider.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.baggage.processor;
+
+import com.google.auto.service.AutoService;
+import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
+import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
+import io.opentelemetry.sdk.internal.IncludeExcludePredicate;
+import io.opentelemetry.sdk.logs.LogRecordProcessor;
+
+@SuppressWarnings("rawtypes")
+@AutoService(ComponentProvider.class)
+public class BaggageLogRecordComponentProvider implements ComponentProvider<LogRecordProcessor> {
+ @Override
+ public String getName() {
+ return "baggage";
+ }
+
+ @Override
+ public LogRecordProcessor create(DeclarativeConfigProperties config) {
+ return new BaggageLogRecordProcessor(
+ IncludeExcludePredicate.createPatternMatching(
+ config.getScalarList("included", String.class),
+ config.getScalarList("excluded", String.class)));
+ }
+
+ @Override
+ public Class<LogRecordProcessor> getType() {
+ return LogRecordProcessor.class;
+ }
+}
diff --git a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordProcessor.java b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordProcessor.java
index 4e8c91505..474f4caef 100644
--- a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordProcessor.java
+++ b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordProcessor.java
@@ -16,15 +16,7 @@
* This log record processor copies attributes stored in {@link Baggage} into each newly created log
* record.
*/
-public class BaggageLogRecordProcessor implements LogRecordProcessor {
-
- /**
- * Creates a new {@link BaggageLogRecordProcessor} that copies all baggage entries into the newly
- * created log record.
- */
- public static BaggageLogRecordProcessor allowAllBaggageKeys() {
- return new BaggageLogRecordProcessor(baggageKey -> true);
- }
+public final class BaggageLogRecordProcessor implements LogRecordProcessor {
private final Predicate<String> baggageKeyPredicate;
@@ -36,6 +28,14 @@ public BaggageLogRecordProcessor(Predicate<String> baggageKeyPredicate) {
this.baggageKeyPredicate = baggageKeyPredicate;
}
+ /**
+ * Creates a new {@link BaggageLogRecordProcessor} that copies all baggage entries into the newly
+ * created log record.
+ */
+ public static BaggageLogRecordProcessor allowAllBaggageKeys() {
+ return new BaggageLogRecordProcessor(baggageKey -> true);
+ }
+
@Override
public void onEmit(Context context, ReadWriteLogRecord logRecord) {
Baggage.fromContext(context)
diff --git a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizer.java b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizer.java
index da35512a3..2e07722e6 100644
--- a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizer.java
+++ b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizer.java
@@ -5,6 +5,7 @@
package io.opentelemetry.contrib.baggage.processor;
+import com.google.auto.service.AutoService;
import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizer;
import io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
@@ -12,7 +13,8 @@
import io.opentelemetry.sdk.trace.SdkTracerProviderBuilder;
import java.util.List;
-public class BaggageProcessorCustomizer implements AutoConfigurationCustomizerProvider {
+@AutoService(AutoConfigurationCustomizerProvider.class)
+public final class BaggageProcessorCustomizer implements AutoConfigurationCustomizerProvider {
@Override
public void customize(AutoConfigurationCustomizer autoConfigurationCustomizer) {
autoConfigurationCustomizer
@@ -37,7 +39,8 @@ private static void addSpanProcessor(
return;
}
- sdkTracerProviderBuilder.addSpanProcessor(createBaggageSpanProcessor(keys));
+ // need to add before the batch span processor
+ sdkTracerProviderBuilder.addSpanProcessorFirst(createBaggageSpanProcessor(keys));
}
static BaggageSpanProcessor createBaggageSpanProcessor(List<String> keys) {
@@ -56,7 +59,8 @@ private static void addLogRecordProcessor(
return;
}
- sdkLoggerProviderBuilder.addLogRecordProcessor(createBaggageLogRecordProcessor(keys));
+ // need to add before the batch log processor
+ sdkLoggerProviderBuilder.addLogRecordProcessorFirst(createBaggageLogRecordProcessor(keys));
}
static BaggageLogRecordProcessor createBaggageLogRecordProcessor(List<String> keys) {
diff --git a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanComponentProvider.java b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanComponentProvider.java
new file mode 100644
index 000000000..19acb6ba7
--- /dev/null
+++ b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanComponentProvider.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.baggage.processor;
+
+import com.google.auto.service.AutoService;
+import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
+import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
+import io.opentelemetry.sdk.internal.IncludeExcludePredicate;
+import io.opentelemetry.sdk.trace.SpanProcessor;
+
+@SuppressWarnings("rawtypes")
+@AutoService(ComponentProvider.class)
+public class BaggageSpanComponentProvider implements ComponentProvider<SpanProcessor> {
+ @Override
+ public String getName() {
+ return "baggage";
+ }
+
+ @Override
+ public SpanProcessor create(DeclarativeConfigProperties config) {
+ return new BaggageSpanProcessor(
+ IncludeExcludePredicate.createPatternMatching(
+ config.getScalarList("included", String.class),
+ config.getScalarList("excluded", String.class)));
+ }
+
+ @Override
+ public Class<SpanProcessor> getType() {
+ return SpanProcessor.class;
+ }
+}
diff --git a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessor.java b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessor.java
index 5f0f53d03..1ba62b19d 100644
--- a/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessor.java
+++ b/baggage-processor/src/main/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessor.java
@@ -16,7 +16,7 @@
* This span processor copies attributes stored in {@link Baggage} into each newly created {@link
* io.opentelemetry.api.trace.Span}.
*/
-public class BaggageSpanProcessor implements SpanProcessor {
+public final class BaggageSpanProcessor implements SpanProcessor {
private final Predicate<String> baggageKeyPredicate;
/**
diff --git a/baggage-processor/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider b/baggage-processor/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider
deleted file mode 100644
index 8eb4afb06..000000000
--- a/baggage-processor/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.AutoConfigurationCustomizerProvider
+++ /dev/null
@@ -1 +0,0 @@
-io.opentelemetry.contrib.baggage.processor.BaggageProcessorCustomizer
diff --git a/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordComponentProviderTest.java b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordComponentProviderTest.java
new file mode 100644
index 000000000..1c8bd28bb
--- /dev/null
+++ b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageLogRecordComponentProviderTest.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.baggage.processor;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import io.opentelemetry.sdk.OpenTelemetrySdk;
+import io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration;
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import org.junit.jupiter.api.Test;
+
+class BaggageLogRecordComponentProviderTest {
+
+ @Test
+ void declarativeConfig() {
+ String yaml =
+ "file_format: 1.0-rc.1\n"
+ + "logger_provider:\n"
+ + " processors:\n"
+ + " - baggage:\n"
+ + " included: [foo]\n"
+ + " excluded: [bar]\n";
+
+ OpenTelemetrySdk sdk =
+ DeclarativeConfiguration.parseAndCreate(
+ new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8)));
+
+ assertThat(sdk).asString().contains("BaggageLogRecordProcessor");
+ }
+}
diff --git a/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizerTest.java b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizerTest.java
index 9dcb9a4a7..645ff5334 100644
--- a/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizerTest.java
+++ b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageProcessorCustomizerTest.java
@@ -12,13 +12,12 @@
import com.google.common.collect.ImmutableMap;
import io.opentelemetry.api.baggage.Baggage;
import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.common.ComponentLoader;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdkBuilder;
-import io.opentelemetry.sdk.autoconfigure.internal.AutoConfigureUtil;
-import io.opentelemetry.sdk.autoconfigure.internal.ComponentLoader;
import io.opentelemetry.sdk.autoconfigure.internal.SpiHelper;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider;
@@ -49,11 +48,15 @@ class BaggageProcessorCustomizerTest {
private static final String MEMORY_EXPORTER = "memory";
@Test
- void test_customizer() {
+ void test_empty_customizer() {
assertCustomizer(
Collections.emptyMap(),
span -> assertThat(span).hasTotalAttributeCount(0),
logRecord -> assertThat(logRecord).hasTotalAttributeCount(0));
+ }
+
+ @Test
+ void test_customizer() {
Map<String, String> properties = new HashMap<>();
properties.put("otel.java.experimental.span-attributes.copy-from-baggage.include", "key");
properties.put("otel.java.experimental.log-attributes.copy-from-baggage.include", "key");
@@ -113,50 +116,50 @@ private static OpenTelemetrySdk getOpenTelemetrySdk(
"none",
"otel.logs.exporter",
MEMORY_EXPORTER))
- .addPropertiesSupplier(() -> properties);
- AutoConfigureUtil.setComponentLoader(
- sdkBuilder,
- new ComponentLoader() {
- @SuppressWarnings("unchecked")
- @Override
- public <T> List<T> load(Class<T> spiClass) {
- if (spiClass == ConfigurableSpanExporterProvider.class) {
- return Collections.singletonList(
- (T)
- new ConfigurableSpanExporterProvider() {
- @Override
- public SpanExporter createExporter(ConfigProperties configProperties) {
- return spanExporter;
- }
-
- @Override
- public String getName() {
- return MEMORY_EXPORTER;
- }
- });
- } else if (spiClass == ConfigurableLogRecordExporterProvider.class) {
- return Collections.singletonList(
- (T)
- new ConfigurableLogRecordExporterProvider() {
- @Override
- public LogRecordExporter createExporter(ConfigProperties configProperties) {
- return logRecordExporter;
- }
-
- @Override
- public String getName() {
- return MEMORY_EXPORTER;
- }
- });
- }
- return spiHelper.load(spiClass);
- }
- });
+ .addPropertiesSupplier(() -> properties)
+ .setComponentLoader(
+ new ComponentLoader() {
+ @Override
+ public <T> List<T> load(Class<T> spiClass) {
+ if (spiClass.equals(ConfigurableSpanExporterProvider.class)) {
+ return Collections.singletonList(
+ spiClass.cast(
+ new ConfigurableSpanExporterProvider() {
+ @Override
+ public SpanExporter createExporter(
+ ConfigProperties configProperties) {
+ return spanExporter;
+ }
+
+ @Override
+ public String getName() {
+ return MEMORY_EXPORTER;
+ }
+ }));
+ } else if (spiClass.equals(ConfigurableLogRecordExporterProvider.class)) {
+ return Collections.singletonList(
+ spiClass.cast(
+ new ConfigurableLogRecordExporterProvider() {
+ @Override
+ public LogRecordExporter createExporter(
+ ConfigProperties configProperties) {
+ return logRecordExporter;
+ }
+
+ @Override
+ public String getName() {
+ return MEMORY_EXPORTER;
+ }
+ }));
+ }
+ return spiHelper.load(spiClass);
+ }
+ });
return sdkBuilder.build().getOpenTelemetrySdk();
}
@Test
- public void test_baggageSpanProcessor_adds_attributes_to_spans(@Mock ReadWriteSpan span) {
+ void test_baggageSpanProcessor_adds_attributes_to_spans(@Mock ReadWriteSpan span) {
try (BaggageSpanProcessor processor =
BaggageProcessorCustomizer.createBaggageSpanProcessor(Collections.singletonList("*"))) {
try (Scope ignore = Baggage.current().toBuilder().put("key", "value").build().makeCurrent()) {
@@ -167,7 +170,7 @@ public void test_baggageSpanProcessor_adds_attributes_to_spans(@Mock ReadWriteSp
}
@Test
- public void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_matches(
+ void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_matches(
@Mock ReadWriteSpan span) {
try (BaggageSpanProcessor processor =
BaggageProcessorCustomizer.createBaggageSpanProcessor(Collections.singletonList("key"))) {
@@ -185,7 +188,7 @@ public void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_m
}
@Test
- public void test_baggageLogRecordProcessor_adds_attributes_to_logRecord(
+ void test_baggageLogRecordProcessor_adds_attributes_to_logRecord(
@Mock ReadWriteLogRecord logRecord) {
try (BaggageLogRecordProcessor processor =
BaggageProcessorCustomizer.createBaggageLogRecordProcessor(
@@ -198,7 +201,7 @@ public void test_baggageLogRecordProcessor_adds_attributes_to_logRecord(
}
@Test
- public void test_baggageLogRecordProcessor_adds_attributes_to_spans_when_key_filter_matches(
+ void test_baggageLogRecordProcessor_adds_attributes_to_spans_when_key_filter_matches(
@Mock ReadWriteLogRecord logRecord) {
try (BaggageLogRecordProcessor processor =
BaggageProcessorCustomizer.createBaggageLogRecordProcessor(
diff --git a/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanComponentProviderTest.java b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanComponentProviderTest.java
new file mode 100644
index 000000000..77399305e
--- /dev/null
+++ b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanComponentProviderTest.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.baggage.processor;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import io.opentelemetry.sdk.OpenTelemetrySdk;
+import io.opentelemetry.sdk.extension.incubator.fileconfig.DeclarativeConfiguration;
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import org.junit.jupiter.api.Test;
+
+class BaggageSpanComponentProviderTest {
+
+ @Test
+ void declarativeConfig() {
+ String yaml =
+ "file_format: 1.0-rc.1\n"
+ + "tracer_provider:\n"
+ + " processors:\n"
+ + " - baggage:\n"
+ + " included: [foo]\n"
+ + " excluded: [bar]\n";
+
+ OpenTelemetrySdk sdk =
+ DeclarativeConfiguration.parseAndCreate(
+ new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8)));
+
+ assertThat(sdk).asString().contains("BaggageSpanProcessor");
+ }
+}
diff --git a/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessorTest.java b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessorTest.java
index ca1180dcc..dd7ea3826 100644
--- a/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessorTest.java
+++ b/baggage-processor/src/test/java/io/opentelemetry/contrib/baggage/processor/BaggageSpanProcessorTest.java
@@ -17,10 +17,10 @@
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
-public class BaggageSpanProcessorTest {
+class BaggageSpanProcessorTest {
@Test
- public void test_baggageSpanProcessor_adds_attributes_to_spans(@Mock ReadWriteSpan span) {
+ void test_baggageSpanProcessor_adds_attributes_to_spans(@Mock ReadWriteSpan span) {
try (BaggageSpanProcessor processor = BaggageSpanProcessor.allowAllBaggageKeys()) {
try (Scope ignore = Baggage.current().toBuilder().put("key", "value").build().makeCurrent()) {
processor.onStart(Context.current(), span);
@@ -30,7 +30,7 @@ public void test_baggageSpanProcessor_adds_attributes_to_spans(@Mock ReadWriteSp
}
@Test
- public void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_matches(
+ void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_matches(
@Mock ReadWriteSpan span) {
try (BaggageSpanProcessor processor = new BaggageSpanProcessor(key -> key.startsWith("k"))) {
try (Scope ignore =
@@ -47,7 +47,7 @@ public void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_m
}
@Test
- public void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_matches_regex(
+ void test_baggageSpanProcessor_adds_attributes_to_spans_when_key_filter_matches_regex(
@Mock ReadWriteSpan span) {
Pattern pattern = Pattern.compile("k.*");
try (BaggageSpanProcessor processor =
diff --git a/build.gradle.kts b/build.gradle.kts
index 6e586f336..2422f54ff 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -14,7 +14,10 @@ nexusPublishing {
packageGroup.set("io.opentelemetry")
repositories {
+ // see https://central.sonatype.org/publish/publish-portal-ossrh-staging-api/#configuration
sonatype {
+ nexusUrl.set(uri("https://ossrh-staging-api.central.sonatype.com/service/local/"))
+ snapshotRepositoryUrl.set(uri("https://central.sonatype.com/repository/maven-snapshots/"))
username.set(System.getenv("SONATYPE_USER"))
password.set(System.getenv("SONATYPE_KEY"))
}
diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts
index 406d7ff5b..e963b8e8e 100644
--- a/buildSrc/build.gradle.kts
+++ b/buildSrc/build.gradle.kts
@@ -1,7 +1,7 @@
plugins {
`kotlin-dsl`
// When updating, update below in dependencies too
- id("com.diffplug.spotless") version "7.0.3"
+ id("com.diffplug.spotless") version "8.0.0"
}
repositories {
@@ -12,10 +12,14 @@ repositories {
dependencies {
// When updating, update above in plugins too
- implementation("com.diffplug.spotless:spotless-plugin-gradle:7.0.3")
- implementation("net.ltgt.gradle:gradle-errorprone-plugin:4.1.0")
- implementation("net.ltgt.gradle:gradle-nullaway-plugin:2.2.0")
- implementation("org.owasp:dependency-check-gradle:12.1.1")
+ implementation("com.diffplug.spotless:com.diffplug.spotless.gradle.plugin:8.0.0")
+ implementation("net.ltgt.errorprone:net.ltgt.errorprone.gradle.plugin:4.3.0")
+ implementation("net.ltgt.nullaway:net.ltgt.nullaway.gradle.plugin:2.3.0")
+ implementation("org.owasp.dependencycheck:org.owasp.dependencycheck.gradle.plugin:12.1.6")
+ implementation("ru.vyarus.animalsniffer:ru.vyarus.animalsniffer.gradle.plugin:2.0.1")
+ implementation("com.gradle.develocity:com.gradle.develocity.gradle.plugin:4.2.1")
+ implementation("me.champeau.gradle.japicmp:me.champeau.gradle.japicmp.gradle.plugin:0.4.6")
+ implementation("com.google.auto.value:auto-value-annotations:1.11.0")
}
spotless {
diff --git a/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts
new file mode 100644
index 000000000..3fda84f4c
--- /dev/null
+++ b/buildSrc/src/main/kotlin/otel.animalsniffer-conventions.gradle.kts
@@ -0,0 +1,19 @@
+import ru.vyarus.gradle.plugin.animalsniffer.AnimalSniffer
+
+plugins {
+ id("otel.java-conventions")
+ id("ru.vyarus.animalsniffer")
+}
+
+dependencies {
+ signature("com.toasttab.android:gummy-bears-api-21:0.12.0:coreLib@signature")
+}
+
+animalsniffer {
+ sourceSets = listOf(java.sourceSets.main.get())
+}
+
+// Always having declared output makes this task properly participate in tasks up-to-date checks
+tasks.withType<AnimalSniffer> {
+ reports.text.required.set(true)
+}
diff --git a/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts
index 1dfc48318..113576db3 100644
--- a/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts
+++ b/buildSrc/src/main/kotlin/otel.errorprone-conventions.gradle.kts
@@ -49,7 +49,6 @@ tasks {
disable("UnnecessarilyFullyQualified")
// TODO (trask) use animal sniffer
- disable("Java7ApiChecker")
disable("Java8ApiChecker")
disable("AndroidJdkLibsChecker")
@@ -79,8 +78,8 @@ tasks {
// cognitive load is dubious.
disable("YodaCondition")
- // We get this warning in modules that compile for old java versions
- disable("StringConcatToTextBlock")
+ // Requires adding compile dependency to JSpecify
+ disable("AddNullMarkedToPackageInfo")
if (name.contains("Jmh") || name.contains("Test")) {
// Allow underscore in test-type method names
diff --git a/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts
new file mode 100644
index 000000000..3905c2526
--- /dev/null
+++ b/buildSrc/src/main/kotlin/otel.japicmp-conventions.gradle.kts
@@ -0,0 +1,158 @@
+import com.google.auto.value.AutoValue
+import japicmp.model.*
+import me.champeau.gradle.japicmp.JapicmpTask
+import me.champeau.gradle.japicmp.report.Violation
+import me.champeau.gradle.japicmp.report.stdrules.*
+
+
+plugins {
+ base
+
+ id("me.champeau.gradle.japicmp")
+}
+
+/**
+ * The latest *released* version of the project. Evaluated lazily so the work is only done if necessary.
+ */
+val latestReleasedVersion: String by lazy {
+ // hack to find the current released version of the project
+ val temp: Configuration = configurations.create("tempConfig") {
+ resolutionStrategy.cacheChangingModulesFor(0, "seconds")
+ resolutionStrategy.cacheDynamicVersionsFor(0, "seconds")
+ }
+ // pick aws-xray, since it's a stable module that's always there.
+ dependencies.add(temp.name, "io.opentelemetry.contrib:opentelemetry-aws-xray:latest.release")
+ val moduleVersion = configurations["tempConfig"].resolvedConfiguration.firstLevelModuleDependencies.elementAt(0).moduleVersion
+ configurations.remove(temp)
+ logger.debug("Discovered latest release version: " + moduleVersion)
+ moduleVersion
+}
+
+class AllowNewAbstractMethodOnAutovalueClasses : AbstractRecordingSeenMembers() {
+ override fun maybeAddViolation(member: JApiCompatibility): Violation? {
+ val allowableAutovalueChanges = setOf(JApiCompatibilityChangeType.METHOD_ABSTRACT_ADDED_TO_CLASS,
+ JApiCompatibilityChangeType.METHOD_ADDED_TO_PUBLIC_CLASS, JApiCompatibilityChangeType.ANNOTATION_ADDED)
+ if (member.compatibilityChanges.filter { !allowableAutovalueChanges.contains(it.type) }.isEmpty() &&
+ member is JApiMethod && isAutoValueClass(member.getjApiClass()))
+ {
+ return Violation.accept(member, "Autovalue will automatically add implementation")
+ }
+ if (member.compatibilityChanges.isEmpty() &&
+ member is JApiClass && isAutoValueClass(member)) {
+ return Violation.accept(member, "Autovalue class modification is allowed")
+ }
+ return null
+ }
+
+ fun isAutoValueClass(japiClass: JApiClass): Boolean {
+ return japiClass.newClass.get().getAnnotation(AutoValue::class.java) != null ||
+ japiClass.newClass.get().getAnnotation(AutoValue.Builder::class.java) != null
+ }
+}
+
+class SourceIncompatibleRule : AbstractRecordingSeenMembers() {
+ override fun maybeAddViolation(member: JApiCompatibility): Violation? {
+ if (!member.isSourceCompatible()) {
+ return Violation.error(member, "Not source compatible: $member")
+ }
+ return null
+ }
+}
+
+/**
+ * Locate the project's artifact of a particular version.
+ */
+fun findArtifact(version: String): File {
+ val existingGroup = group
+ try {
+ // Temporarily change the group name because we want to fetch an artifact with the same
+ // Maven coordinates as the project, which Gradle would not allow otherwise.
+ group = "virtual_group"
+ val depModule = "io.opentelemetry.contrib:${base.archivesName.get()}:$version@jar"
+ val depJar = "${base.archivesName.get()}-$version.jar"
+ val configuration: Configuration = configurations.detachedConfiguration(
+ dependencies.create(depModule),
+ )
+ return files(configuration.files).filter {
+ it.name.equals(depJar)
+ }.singleFile
+ } finally {
+ group = existingGroup
+ }
+}
+
+// generate the api diff report for any module that is stable and publishes a jar.
+if (project.findProperty("otel.stable") == "true" && !project.name.startsWith("bom")) {
+ afterEvaluate {
+ tasks {
+ val jApiCmp by registering(JapicmpTask::class) {
+ dependsOn("jar")
+
+ // the japicmp "new" version is either the user-specified one, or the locally built jar.
+ val apiNewVersion: String? by project
+ val newArtifact = apiNewVersion?.let { findArtifact(it) }
+ ?: file(getByName<Jar>("jar").archiveFile)
+ newClasspath.from(files(newArtifact))
+
+ // only output changes, not everything
+ onlyModified.set(true)
+
+ // the japicmp "old" version is either the user-specified one, or the latest release.
+ val apiBaseVersion: String? by project
+ val baselineVersion = apiBaseVersion ?: latestReleasedVersion
+ oldClasspath.from(
+ try {
+ files(findArtifact(baselineVersion))
+ } catch (e: Exception) {
+ // if we can't find the baseline artifact, this is probably one that's never been published before,
+ // so publish the whole API. We do that by flipping this flag, and comparing the current against nothing.
+ onlyModified.set(false)
+ files()
+ },
+ )
+
+ // Reproduce defaults from https://github.com/melix/japicmp-gradle-plugin/blob/09f52739ef1fccda6b4310cf3f4b19dc97377024/src/main/java/me/champeau/gradle/japicmp/report/ViolationsGenerator.java#L130
+ // with some changes.
+ val exclusions = mutableListOf<String>()
+ // Generics are not detected correctly
+ exclusions.add("CLASS_GENERIC_TEMPLATE_CHANGED")
+ // Allow new default methods on interfaces
+ exclusions.add("METHOD_NEW_DEFAULT")
+ // Allow adding default implementations for default methods
+ exclusions.add("METHOD_ABSTRACT_NOW_DEFAULT")
+ // Bug prevents recognizing default methods of superinterface.
+ // Fixed in https://github.com/siom79/japicmp/pull/343 but not yet available in me.champeau.gradle.japicmp
+ exclusions.add("METHOD_ABSTRACT_ADDED_IN_IMPLEMENTED_INTERFACE")
+ compatibilityChangeExcludes.set(exclusions)
+ richReport {
+ addSetupRule(RecordSeenMembersSetup::class.java)
+ addRule(JApiChangeStatus.NEW, SourceCompatibleRule::class.java)
+ addRule(JApiChangeStatus.MODIFIED, SourceCompatibleRule::class.java)
+ addRule(JApiChangeStatus.UNCHANGED, UnchangedMemberRule::class.java)
+ // Allow new abstract methods on autovalue
+ addRule(AllowNewAbstractMethodOnAutovalueClasses::class.java)
+ addRule(BinaryIncompatibleRule::class.java)
+ // Disallow source incompatible changes, which are allowed by default for some reason
+ addRule(SourceIncompatibleRule::class.java)
+ }
+
+ // this is needed so that we only consider the current artifact, and not dependencies
+ ignoreMissingClasses.set(true)
+ packageExcludes.addAll(
+ "*.internal",
+ "*.internal.*"
+ )
+ annotationExcludes.add("@kotlin.Metadata")
+ val baseVersionString = if (apiBaseVersion == null) "latest" else baselineVersion
+ txtOutputFile.set(
+ apiNewVersion?.let { file("$rootDir/docs/apidiffs/${apiNewVersion}_vs_$baselineVersion/${base.archivesName.get()}.txt") }
+ ?: file("$rootDir/docs/apidiffs/current_vs_$baseVersionString/${base.archivesName.get()}.txt"),
+ )
+ }
+ // have the check task depend on the api comparison task, to make it more likely it will get used.
+ named("check") {
+ dependsOn(jApiCmp)
+ }
+ }
+ }
+}
diff --git a/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts
index 75b9ff2f8..f512ccf2c 100644
--- a/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts
+++ b/buildSrc/src/main/kotlin/otel.java-conventions.gradle.kts
@@ -7,6 +7,7 @@ plugins {
id("otel.errorprone-conventions")
id("otel.spotless-conventions")
+ id("otel.japicmp-conventions")
id("org.owasp.dependencycheck")
}
@@ -66,10 +67,23 @@ tasks {
withType<Test>().configureEach {
useJUnitPlatform()
+ val maxTestRetries = gradle.startParameter.projectProperties["maxTestRetries"]?.toInt() ?: 0
+ develocity.testRetry {
+ // You can see tests that were retried by this mechanism in the collected test reports and build scans.
+ maxRetries.set(maxTestRetries)
+ }
+
testLogging {
exceptionFormat = TestExceptionFormat.FULL
showStandardStreams = true
}
+
+ configure<JacocoTaskExtension> {
+ // only care about code coverage for code in this repository
+ // (in particular avoiding netty classes which sometimes end up
+ // causing sporadic CI failures)
+ includes = listOf("io/opentelemetry/contrib/**")
+ }
}
withType().configureEach {
@@ -93,12 +107,13 @@ plugins.withId("otel.publish-conventions") {
register("generateVersionResource") {
val moduleName = otelJava.moduleName
val propertiesDir = moduleName.map { layout.buildDirectory.file("generated/properties/${it.replace('.', '/')}") }
+ val projectVersion = project.version.toString()
- inputs.property("project.version", project.version.toString())
+ inputs.property("project.version", projectVersion)
outputs.dir(propertiesDir)
doLast {
- File(propertiesDir.get().get().asFile, "version.properties").writeText("contrib.version=${project.version}")
+ File(propertiesDir.get().get().asFile, "version.properties").writeText("contrib.version=${projectVersion}")
}
}
}
@@ -133,12 +148,12 @@ dependencies {
testing {
suites.withType(JvmTestSuite::class).configureEach {
dependencies {
- implementation(project(project.path))
+ implementation(project())
- implementation(enforcedPlatform("org.junit:junit-bom:5.12.2"))
- implementation(enforcedPlatform("org.testcontainers:testcontainers-bom:1.20.6"))
- implementation(enforcedPlatform("com.google.guava:guava-bom:33.4.8-jre"))
- implementation(enforcedPlatform("com.linecorp.armeria:armeria-bom:1.32.4"))
+ implementation(enforcedPlatform("org.junit:junit-bom:5.14.0"))
+ implementation(enforcedPlatform("org.testcontainers:testcontainers-bom:1.21.3"))
+ implementation(enforcedPlatform("com.google.guava:guava-bom:33.5.0-jre"))
+ implementation(enforcedPlatform("com.linecorp.armeria:armeria-bom:1.33.4"))
compileOnly("com.google.auto.value:auto-value-annotations")
compileOnly("com.google.errorprone:error_prone_annotations")
diff --git a/buildSrc/src/main/kotlin/otel.publish-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.publish-conventions.gradle.kts
index 7b922edc8..070edf183 100644
--- a/buildSrc/src/main/kotlin/otel.publish-conventions.gradle.kts
+++ b/buildSrc/src/main/kotlin/otel.publish-conventions.gradle.kts
@@ -57,6 +57,17 @@ publishing {
developerConnection.set("scm:git:git@github.com:open-telemetry/opentelemetry-java-contrib.git")
url.set("git@github.com:open-telemetry/opentelemetry-java-contrib.git")
}
+
+ withXml {
+ // Since 5.0 okhttp uses gradle metadata to choose either okhttp-jvm or okhttp-android.
+ // This does not work for maven builds that don't understand gradle metadata. They end up
+ // using the okhttp artifact that is an empty jar. Here we replace usages of okhttp with
+ // okhttp-jvm so that maven could get the actual okhttp dependency instead of the empty jar.
+ var result = asString()
+ var modified = result.toString().replace(">okhttp<", ">okhttp-jvm<")
+ result.clear()
+ result.append(modified)
+ }
}
}
}
diff --git a/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts b/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts
index b1c39dcd0..f3d387872 100644
--- a/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts
+++ b/buildSrc/src/main/kotlin/otel.spotless-conventions.gradle.kts
@@ -8,11 +8,12 @@ spotless {
licenseHeaderFile(rootProject.file("buildscripts/spotless.license.java"), "(package|import|public|// Includes work from:)")
target("src/**/*.java")
}
- plugins.withId("groovy") {
- groovy {
- licenseHeaderFile(rootProject.file("buildscripts/spotless.license.java"), "(package|import|class)")
- }
- }
+ // commented out for now due to incompatibility with gradle cache configuration
+ // plugins.withId("groovy") {
+ // groovy {
+ // licenseHeaderFile(rootProject.file("buildscripts/spotless.license.java"), "(package|import|class)")
+ // }
+ // }
plugins.withId("scala") {
scala {
scalafmt()
diff --git a/cloudfoundry-resources/README.md b/cloudfoundry-resources/README.md
index 355f9ce6e..8ba62971c 100644
--- a/cloudfoundry-resources/README.md
+++ b/cloudfoundry-resources/README.md
@@ -17,7 +17,7 @@ This variable contains a JSON structure, which is parsed to fill the following a
| cloudfoundry.space.id | space_id |
| cloudfoundry.space.name | space_name |
-The resource attributes follow the [CloudFoundry semantic convention.](https://github.com/open-telemetry/semantic-conventions/blob/main/docs/attributes-registry/cloudfoundry.md).
+The resource attributes follow the [CloudFoundry semantic convention.](https://github.com/open-telemetry/semantic-conventions/blob/main/docs/resource/cloudfoundry.md).
A description of `VCAP_APPLICATION` is available in the [CloudFoundry documentation](https://docs.cloudfoundry.org/devguide/deploy-apps/environment-variable.html#VCAP-APPLICATION).
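For illustration, the parsed attributes can be inspected through the provider in this module. A minimal sketch, passing `null` for the `ConfigProperties` argument on the assumption (suggested by the detector above) that the provider does not consult it:

```java
import io.opentelemetry.contrib.cloudfoundry.resources.CloudFoundryResourceProvider;
import io.opentelemetry.sdk.resources.Resource;

class CloudFoundryResourceExample {
  public static void main(String[] args) {
    // Reads VCAP_APPLICATION from the environment; attributes are empty when it is not set.
    Resource resource = new CloudFoundryResourceProvider().createResource(null);
    resource.getAttributes().forEach((key, value) -> System.out.println(key + " = " + value));
  }
}
```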
## Component owners
diff --git a/cloudfoundry-resources/build.gradle.kts b/cloudfoundry-resources/build.gradle.kts
index e768f7389..d70c44500 100644
--- a/cloudfoundry-resources/build.gradle.kts
+++ b/cloudfoundry-resources/build.gradle.kts
@@ -9,6 +9,7 @@ otelJava.moduleName.set("io.opentelemetry.contrib.cloudfoundry.resources")
dependencies {
api("io.opentelemetry:opentelemetry-api")
+ compileOnly("io.opentelemetry:opentelemetry-api-incubator")
api("io.opentelemetry:opentelemetry-sdk")
compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi")
@@ -19,6 +20,38 @@ dependencies {
testImplementation("io.opentelemetry.semconv:opentelemetry-semconv-incubating")
testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure")
+ testImplementation("io.opentelemetry:opentelemetry-api-incubator")
testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi")
+ testImplementation("io.opentelemetry:opentelemetry-exporter-logging")
+ testImplementation("io.opentelemetry:opentelemetry-sdk-extension-incubator")
+}
+
+tasks {
+ withType<Test>().configureEach {
+ environment(
+ "VCAP_APPLICATION" to """
+ {
+ "application_id": "0193a038-e615-7e5e-92ca-f4bcd7ba0a25",
+ "application_name": "cf-app-name",
+ "application_uris": [
+ "testapp.example.com"
+ ],
+ "cf_api": "https://api.cf.example.com",
+ "limits": {
+ "fds": 256
+ },
+ "instance_index": 1,
+ "organization_id": "0193a375-8d8e-7e0c-a832-01ce9ded40dc",
+ "organization_name": "cf-org-name",
+ "process_id": "0193a4e3-8fd3-71b9-9fe3-5640c53bf1e2",
+ "process_type": "web",
+ "space_id": "0193a7e7-da17-7ea4-8940-b1e07b401b16",
+ "space_name": "cf-space-name",
+ "users": null
+ }
+ """.trimIndent(),
+ )
+ jvmArgs("-Dotel.experimental.config.file=${project.projectDir.resolve("src/test/resources/declarative-config.yaml")}")
+ }
}
diff --git a/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResource.java b/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResource.java
index 7d6313928..c8e7bd2f2 100644
--- a/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResource.java
+++ b/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResource.java
@@ -5,6 +5,8 @@
package io.opentelemetry.contrib.cloudfoundry.resources;
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
+
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
@@ -22,24 +24,22 @@ public final class CloudFoundryResource {
private static final String ENV_VCAP_APPLICATION = "VCAP_APPLICATION";
// copied from CloudfoundryIncubatingAttributes
- private static final AttributeKey<String> CLOUDFOUNDRY_APP_ID =
- AttributeKey.stringKey("cloudfoundry.app.id");
+ private static final AttributeKey<String> CLOUDFOUNDRY_APP_ID = stringKey("cloudfoundry.app.id");
private static final AttributeKey<String> CLOUDFOUNDRY_APP_INSTANCE_ID =
- AttributeKey.stringKey("cloudfoundry.app.instance.id");
+ stringKey("cloudfoundry.app.instance.id");
private static final AttributeKey<String> CLOUDFOUNDRY_APP_NAME =
- AttributeKey.stringKey("cloudfoundry.app.name");
- private static final AttributeKey<String> CLOUDFOUNDRY_ORG_ID =
- AttributeKey.stringKey("cloudfoundry.org.id");
+ stringKey("cloudfoundry.app.name");
+ private static final AttributeKey<String> CLOUDFOUNDRY_ORG_ID = stringKey("cloudfoundry.org.id");
private static final AttributeKey<String> CLOUDFOUNDRY_ORG_NAME =
- AttributeKey.stringKey("cloudfoundry.org.name");
+ stringKey("cloudfoundry.org.name");
private static final AttributeKey<String> CLOUDFOUNDRY_PROCESS_ID =
- AttributeKey.stringKey("cloudfoundry.process.id");
+ stringKey("cloudfoundry.process.id");
private static final AttributeKey<String> CLOUDFOUNDRY_PROCESS_TYPE =
- AttributeKey.stringKey("cloudfoundry.process.type");
+ stringKey("cloudfoundry.process.type");
private static final AttributeKey<String> CLOUDFOUNDRY_SPACE_ID =
- AttributeKey.stringKey("cloudfoundry.space.id");
+ stringKey("cloudfoundry.space.id");
private static final AttributeKey<String> CLOUDFOUNDRY_SPACE_NAME =
- AttributeKey.stringKey("cloudfoundry.space.name");
+ stringKey("cloudfoundry.space.name");
private static final Logger LOG = Logger.getLogger(CloudFoundryResource.class.getName());
private static final JsonFactory JSON_FACTORY = new JsonFactory();
private static final Resource INSTANCE = buildResource(System::getenv);
diff --git a/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceDetector.java b/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceDetector.java
new file mode 100644
index 000000000..357d83533
--- /dev/null
+++ b/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceDetector.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.cloudfoundry.resources;
+
+import io.opentelemetry.api.incubator.config.DeclarativeConfigProperties;
+import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
+import io.opentelemetry.sdk.resources.Resource;
+
+public final class CloudFoundryResourceDetector implements ComponentProvider<Resource> {
+
+ @Override
+ public Class<Resource> getType() {
+ return Resource.class;
+ }
+
+ @Override
+ public String getName() {
+ return "cloud_foundry";
+ }
+
+ @Override
+ public Resource create(DeclarativeConfigProperties config) {
+ return CloudFoundryResource.get();
+ }
+}
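Aside (not part of the diff): the new detector is registered through the META-INF/services entry added a few hunks below, so it can be discovered with the standard ServiceLoader. A hedged sketch, assuming the cloudfoundry-resources jar is on the classpath; the demo class name is hypothetical.

import io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider;
import java.util.ServiceLoader;

// Hypothetical demo: list every ComponentProvider found on the classpath; the entry added in this
// diff should show up as "cloud_foundry -> Resource".
final class ComponentProviderDiscoveryDemo {
  private ComponentProviderDiscoveryDemo() {}

  public static void main(String[] args) {
    for (ComponentProvider<?> provider : ServiceLoader.load(ComponentProvider.class)) {
      System.out.println(provider.getName() + " -> " + provider.getType().getSimpleName());
    }
  }
}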
diff --git a/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceProvider.java b/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceProvider.java
index e3f3e3c64..992eb93dc 100644
--- a/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceProvider.java
+++ b/cloudfoundry-resources/src/main/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceProvider.java
@@ -9,7 +9,7 @@
import io.opentelemetry.sdk.autoconfigure.spi.ResourceProvider;
import io.opentelemetry.sdk.resources.Resource;
-public class CloudFoundryResourceProvider implements ResourceProvider {
+public final class CloudFoundryResourceProvider implements ResourceProvider {
@Override
public Resource createResource(ConfigProperties configProperties) {
diff --git a/cloudfoundry-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider b/cloudfoundry-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider
new file mode 100644
index 000000000..96092ce3d
--- /dev/null
+++ b/cloudfoundry-resources/src/main/resources/META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider
@@ -0,0 +1 @@
+io.opentelemetry.contrib.cloudfoundry.resources.CloudFoundryResourceDetector
diff --git a/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceTest.java b/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceTest.java
index 1c533cd8a..96474c966 100644
--- a/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceTest.java
+++ b/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/CloudFoundryResourceTest.java
@@ -5,7 +5,9 @@
package io.opentelemetry.contrib.cloudfoundry.resources;
+import static java.util.stream.Collectors.joining;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.fail;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.semconv.SchemaUrls;
@@ -18,8 +20,6 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
-import java.util.stream.Collectors;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
class CloudFoundryResourceTest {
@@ -36,11 +36,11 @@ private static String loadVcapApplicationSample(String filename) {
if (is != null) {
return new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))
.lines()
- .collect(Collectors.joining());
+ .collect(joining());
}
- Assertions.fail("Cannot load resource " + filename);
+ fail("Cannot load resource " + filename);
} catch (IOException e) {
- Assertions.fail("Error reading " + filename);
+ fail("Error reading " + filename);
}
return "";
}
diff --git a/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/ResourceComponentProviderTest.java b/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/ResourceComponentProviderTest.java
new file mode 100644
index 000000000..b4b659156
--- /dev/null
+++ b/cloudfoundry-resources/src/test/java/io/opentelemetry/contrib/cloudfoundry/resources/ResourceComponentProviderTest.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.cloudfoundry.resources;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;
+import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
+import org.assertj.core.api.InstanceOfAssertFactory;
+import org.junit.jupiter.api.Test;
+
+class ResourceComponentProviderTest {
+ @Test
+ void endToEnd() {
+ assertThat(
+ AutoConfiguredOpenTelemetrySdk.builder()
+ .build()
+ .getOpenTelemetrySdk()
+ .getSdkTracerProvider())
+ .extracting("sharedState")
+ .extracting("resource")
+ .extracting(
+ "attributes",
+ new InstanceOfAssertFactory<>(Attributes.class, OpenTelemetryAssertions::assertThat))
+ .containsEntry("cloudfoundry.app.name", "cf-app-name");
+ }
+}
diff --git a/cloudfoundry-resources/src/test/resources/declarative-config.yaml b/cloudfoundry-resources/src/test/resources/declarative-config.yaml
new file mode 100644
index 000000000..dc6ddf5d3
--- /dev/null
+++ b/cloudfoundry-resources/src/test/resources/declarative-config.yaml
@@ -0,0 +1,10 @@
+file_format: "1.0-rc.1"
+resource:
+ detection/development:
+ detectors:
+ - cloud_foundry:
+tracer_provider:
+ processors:
+ - simple:
+ exporter:
+ console:
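Aside (not part of the diff): a minimal sketch of how this file reaches the SDK, mirroring the jvmArgs line added to the build file earlier in this diff. The demo class name and the relative path are assumptions.

import io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk;

// Hypothetical demo: point the autoconfigured SDK at the declarative configuration file above.
final class DeclarativeConfigDemo {
  private DeclarativeConfigDemo() {}

  public static void main(String[] args) {
    // Same property that the test JVM args set earlier in this diff.
    System.setProperty(
        "otel.experimental.config.file", "src/test/resources/declarative-config.yaml");
    AutoConfiguredOpenTelemetrySdk.builder().build();
  }
}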
diff --git a/compressors/compressor-zstd/build.gradle.kts b/compressors/compressor-zstd/build.gradle.kts
index 63db3ed20..acc7f3fe9 100644
--- a/compressors/compressor-zstd/build.gradle.kts
+++ b/compressors/compressor-zstd/build.gradle.kts
@@ -9,7 +9,7 @@ otelJava.moduleName.set("io.opentelemetry.contrib.compressor.zstd")
dependencies {
api("io.opentelemetry:opentelemetry-exporter-common")
- implementation("com.github.luben:zstd-jni:1.5.7-2")
+ implementation("com.github.luben:zstd-jni:1.5.7-5")
testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
testImplementation("io.opentelemetry:opentelemetry-exporter-otlp")
diff --git a/consistent-sampling/README.md b/consistent-sampling/README.md
index 4f848eb3a..49f4c0565 100644
--- a/consistent-sampling/README.md
+++ b/consistent-sampling/README.md
@@ -5,7 +5,7 @@ There are two major components included here.
## Original proposal implementation
The original specification for consistent probability sampling is defined by
-
+
and .
It supports sampling probabilities that are powers of 2 (1, 1/2, 1/4, ...), and uses an 8-bit `r-value` and an 8-bit `p-value` in the tracestate.
@@ -14,18 +14,18 @@ The implementation of this proposal is contained by the package `io/opentelemetr
* **ConsistentSampler**:
abstract base class of all consistent sampler implementations below
* **ConsistentAlwaysOffSampler**:
- see
+ see
* **ConsistentAlwaysOnSampler**:
- see
+ see
* **ConsistentComposedAndSampler**:
allows combining two consistent samplers and samples when both samplers would sample
* **ConsistentComposedOrSampler**:
allows combining two consistent samplers and samples when at least one of the two samplers would sample,
- see
+ see
* **ConsistentParentBasedSampler**:
- see
+ see
* **ConsistentProbabilityBasedSampler**:
- see
+ see
* **ConsistentRateLimitingSampler**:
a rate limiting sampler based on exponential smoothing that dynamically adjusts the sampling
probability based on the estimated rate of spans occurring to satisfy a given rate of sampled spans
diff --git a/consistent-sampling/build.gradle.kts b/consistent-sampling/build.gradle.kts
index 88cdf543a..5fc4135bb 100644
--- a/consistent-sampling/build.gradle.kts
+++ b/consistent-sampling/build.gradle.kts
@@ -9,6 +9,17 @@ otelJava.moduleName.set("io.opentelemetry.contrib.sampler")
dependencies {
api("io.opentelemetry:opentelemetry-sdk-trace")
api("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi")
- testImplementation("org.hipparchus:hipparchus-core:4.0.1")
- testImplementation("org.hipparchus:hipparchus-stat:4.0.1")
+ testImplementation("org.hipparchus:hipparchus-core:4.0.2")
+ testImplementation("org.hipparchus:hipparchus-stat:4.0.2")
+}
+
+tasks {
+ withType<Test>().configureEach {
+ develocity.testRetry {
+ // TODO (trask) fix flaky tests and remove this workaround
+ if (System.getenv().containsKey("CI")) {
+ maxRetries.set(5)
+ }
+ }
+ }
}
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentRateLimitingSampler.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentRateLimitingSampler.java
index 9c2b93f74..14d6dfee4 100644
--- a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentRateLimitingSampler.java
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentRateLimitingSampler.java
@@ -74,7 +74,7 @@ private static final class State {
private final double effectiveWindowNanos;
private final long lastNanoTime;
- public State(double effectiveWindowCount, double effectiveWindowNanos, long lastNanoTime) {
+ State(double effectiveWindowCount, double effectiveWindowNanos, long lastNanoTime) {
this.effectiveWindowCount = effectiveWindowCount;
this.effectiveWindowNanos = effectiveWindowNanos;
this.lastNanoTime = lastNanoTime;
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessor.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessor.java
index c522bf1c6..67defd745 100644
--- a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessor.java
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessor.java
@@ -55,7 +55,7 @@ private static final class ReadableSpanWithPriority {
private final int rval;
private final long priority;
- public static ReadableSpanWithPriority create(
+ static ReadableSpanWithPriority create(
ReadableSpan readableSpan, RandomGenerator randomGenerator) {
String otelTraceStateString =
readableSpan.getSpanContext().getTraceState().get(OtelTraceState.TRACE_STATE_KEY);
@@ -201,7 +201,7 @@ private static final class Reservoir {
private final PriorityQueue<ReadableSpanWithPriority> queue;
private final RandomGenerator randomGenerator;
- public Reservoir(int reservoirSize, RandomGenerator randomGenerator) {
+ Reservoir(int reservoirSize, RandomGenerator randomGenerator) {
if (reservoirSize < 1) {
throw new IllegalArgumentException();
}
@@ -211,7 +211,7 @@ public Reservoir(int reservoirSize, RandomGenerator randomGenerator) {
this.randomGenerator = randomGenerator;
}
- public void add(ReadableSpanWithPriority readableSpanWithPriority) {
+ void add(ReadableSpanWithPriority readableSpanWithPriority) {
if (queue.size() < reservoirSize) {
queue.add(readableSpanWithPriority);
@@ -232,7 +232,7 @@ public void add(ReadableSpanWithPriority readableSpanWithPriority) {
}
}
- public List<SpanData> getResult() {
+ List<SpanData> getResult() {
if (numberOfDiscardedSpansWithMaxDiscardedRValue == 0) {
return queue.stream().map(x -> x.readableSpan.toSpanData()).collect(Collectors.toList());
@@ -294,7 +294,7 @@ public List getResult() {
return result;
}
- public boolean isEmpty() {
+ boolean isEmpty() {
return queue.isEmpty();
}
}
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSampler.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSampler.java
index f2e92651c..253edf709 100644
--- a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSampler.java
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSampler.java
@@ -5,42 +5,20 @@
package io.opentelemetry.contrib.sampler.consistent56;
-import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.calculateSamplingProbability;
-import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.checkThreshold;
-import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.getInvalidThreshold;
-import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.getMaxThreshold;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.calculateThreshold;
-import io.opentelemetry.api.common.Attributes;
-import io.opentelemetry.api.trace.SpanKind;
-import io.opentelemetry.context.Context;
-import io.opentelemetry.sdk.trace.data.LinkData;
-import java.util.List;
-
-public class ConsistentFixedThresholdSampler extends ConsistentSampler {
+public class ConsistentFixedThresholdSampler extends ConsistentThresholdSampler {
private final long threshold;
private final String description;
protected ConsistentFixedThresholdSampler(long threshold) {
- checkThreshold(threshold);
- this.threshold = threshold;
-
- String thresholdString;
- if (threshold == getMaxThreshold()) {
- thresholdString = "max";
- } else {
- thresholdString =
- ConsistentSamplingUtil.appendLast56BitHexEncodedWithoutTrailingZeros(
- new StringBuilder(), threshold)
- .toString();
- }
+ this.threshold = getThreshold(threshold);
+ this.description = getThresholdDescription(threshold);
+ }
- this.description =
- "ConsistentFixedThresholdSampler{threshold="
- + thresholdString
- + ", sampling probability="
- + calculateSamplingProbability(threshold)
- + "}";
+ protected ConsistentFixedThresholdSampler(double samplingProbability) {
+ this(calculateThreshold(samplingProbability));
}
@Override
@@ -49,18 +27,7 @@ public String getDescription() {
}
@Override
- public SamplingIntent getSamplingIntent(
- Context parentContext,
- String name,
- SpanKind spanKind,
- Attributes attributes,
- List<LinkData> parentLinks) {
-
- return () -> {
- if (threshold == getMaxThreshold()) {
- return getInvalidThreshold();
- }
- return threshold;
- };
+ public long getThreshold() {
+ return threshold;
}
}
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSampler.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSampler.java
index b58bee96a..0075c5692 100644
--- a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSampler.java
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSampler.java
@@ -104,7 +104,7 @@ private static final class State {
private final double effectiveDelegateProbability;
private final long lastNanoTime;
- public State(
+ State(
double effectiveWindowCount,
double effectiveWindowNanos,
long lastNanoTime,
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSampler.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSampler.java
index 1b2cedf08..22ee83b8c 100644
--- a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSampler.java
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSampler.java
@@ -55,6 +55,17 @@ public static ConsistentSampler probabilityBased(double samplingProbability) {
return new ConsistentFixedThresholdSampler(threshold);
}
+ /**
+ * Returns a {@link ConsistentSampler} that samples each span with a known probability, where the
+ * probability can be dynamically updated.
+ *
+ * @param samplingProbability the sampling probability
+ * @return a sampler
+ */
+ public static ConsistentSampler updateableProbabilityBased(double samplingProbability) {
+ return new ConsistentVariableThresholdSampler(samplingProbability);
+ }
+
/**
* Returns a new {@link ConsistentSampler} that respects the sampling decision of the parent span
* or falls-back to the given sampler if it is a root span.
@@ -186,10 +197,19 @@ public final SamplingResult shouldSample(
boolean isSampled;
boolean isAdjustedCountCorrect;
if (isValidThreshold(threshold)) {
- long randomness = getRandomness(otelTraceState, traceId);
- isSampled = threshold <= randomness;
isAdjustedCountCorrect = intent.isAdjustedCountReliable();
- } else { // DROP
+ // determine the randomness value to use
+ long randomness;
+ if (isAdjustedCountCorrect) {
+ randomness = getRandomness(otelTraceState, traceId);
+ } else {
+ // We cannot assume any particular distribution of the provided trace randomness,
+ // because the sampling decision may depend directly or indirectly on the randomness value;
+ // however, we still want to sample with probability corresponding to the obtained threshold
+ randomness = RandomValueGenerators.getDefault().generate(traceId);
+ }
+ isSampled = threshold <= randomness;
+ } else { // invalid threshold, DROP
isSampled = false;
isAdjustedCountCorrect = false;
}
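Aside (not part of the diff): a hedged usage sketch for the new updateableProbabilityBased factory. The returned sampler is backed by ConsistentVariableThresholdSampler (added later in this diff), so the probability can be changed at runtime via setSamplingProbability; the cast, the demo class, and the wiring into SdkTracerProvider are illustrative assumptions, not part of this change.

import io.opentelemetry.contrib.sampler.consistent56.ConsistentSampler;
import io.opentelemetry.contrib.sampler.consistent56.ConsistentVariableThresholdSampler;
import io.opentelemetry.sdk.trace.SdkTracerProvider;

// Hypothetical demo of adjusting the sampling probability at runtime.
final class UpdateableSamplerDemo {
  private UpdateableSamplerDemo() {}

  public static void main(String[] args) {
    ConsistentVariableThresholdSampler sampler =
        (ConsistentVariableThresholdSampler) ConsistentSampler.updateableProbabilityBased(0.25);

    SdkTracerProvider tracerProvider = SdkTracerProvider.builder().setSampler(sampler).build();

    // Later, e.g. from a configuration-refresh callback, lower the probability to 5%.
    sampler.setSamplingProbability(0.05);

    tracerProvider.close();
  }
}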
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentThresholdSampler.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentThresholdSampler.java
new file mode 100644
index 000000000..63c1dbeaa
--- /dev/null
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentThresholdSampler.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.sampler.consistent56;
+
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.appendLast56BitHexEncodedWithoutTrailingZeros;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.calculateSamplingProbability;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.checkThreshold;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.getInvalidThreshold;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.getMaxThreshold;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.trace.SpanKind;
+import io.opentelemetry.context.Context;
+import io.opentelemetry.sdk.trace.data.LinkData;
+import java.util.List;
+
+public abstract class ConsistentThresholdSampler extends ConsistentSampler {
+
+ protected abstract long getThreshold();
+
+ protected static long getThreshold(long threshold) {
+ checkThreshold(threshold);
+ return threshold;
+ }
+
+ protected static String getThresholdDescription(long threshold) {
+ String thresholdString;
+ if (threshold == getMaxThreshold()) {
+ thresholdString = "max";
+ } else {
+ thresholdString =
+ appendLast56BitHexEncodedWithoutTrailingZeros(new StringBuilder(), threshold).toString();
+ }
+
+ return "ConsistentFixedThresholdSampler{threshold="
+ + thresholdString
+ + ", sampling probability="
+ + calculateSamplingProbability(threshold)
+ + "}";
+ }
+
+ @Override
+ public SamplingIntent getSamplingIntent(
+ Context parentContext,
+ String name,
+ SpanKind spanKind,
+ Attributes attributes,
+ List<LinkData> parentLinks) {
+
+ return () -> {
+ if (getThreshold() == getMaxThreshold()) {
+ return getInvalidThreshold();
+ }
+ return getThreshold();
+ };
+ }
+}
diff --git a/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentVariableThresholdSampler.java b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentVariableThresholdSampler.java
new file mode 100644
index 000000000..1558e961c
--- /dev/null
+++ b/consistent-sampling/src/main/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentVariableThresholdSampler.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.sampler.consistent56;
+
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.calculateSamplingProbability;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.calculateThreshold;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.checkThreshold;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.getMaxThreshold;
+
+public class ConsistentVariableThresholdSampler extends ConsistentThresholdSampler {
+
+ private volatile long threshold;
+ private volatile String description = "";
+
+ protected ConsistentVariableThresholdSampler(double samplingProbability) {
+ setSamplingProbability(samplingProbability);
+ }
+
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
+ @Override
+ public long getThreshold() {
+ return threshold;
+ }
+
+ public void setSamplingProbability(double samplingProbability) {
+ long threshold = calculateThreshold(samplingProbability);
+ checkThreshold(threshold);
+ this.threshold = threshold;
+
+ String thresholdString;
+ if (threshold == getMaxThreshold()) {
+ thresholdString = "max";
+ } else {
+ thresholdString =
+ ConsistentSamplingUtil.appendLast56BitHexEncodedWithoutTrailingZeros(
+ new StringBuilder(), threshold)
+ .toString();
+ }
+
+ // There is a tiny window in which the description can be out of date with the threshold,
+ // but this doesn't really matter
+ this.description =
+ "ConsistentVariableThresholdSampler{threshold="
+ + thresholdString
+ + ", sampling probability="
+ + calculateSamplingProbability(threshold)
+ + "}";
+ }
+}
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentProbabilityBasedSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentProbabilityBasedSamplerTest.java
index 4b9d3e425..a05506bc8 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentProbabilityBasedSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentProbabilityBasedSamplerTest.java
@@ -6,7 +6,6 @@
package io.opentelemetry.contrib.sampler.consistent;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertTrue;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanKind;
@@ -25,7 +24,7 @@
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
-public class ConsistentProbabilityBasedSamplerTest {
+class ConsistentProbabilityBasedSamplerTest {
private Context parentContext;
private String traceId;
@@ -63,8 +62,8 @@ private void test(SplittableRandom rng, double samplingProbability) {
.getUpdatedTraceState(TraceState.getDefault())
.get(OtelTraceState.TRACE_STATE_KEY);
OtelTraceState traceState = OtelTraceState.parse(traceStateString);
- assertTrue(traceState.hasValidR());
- assertTrue(traceState.hasValidP());
+ assertThat(traceState.hasValidR()).isTrue();
+ assertThat(traceState.hasValidP()).isTrue();
observedPvalues.merge(traceState.getP(), 1L, Long::sum);
}
}
@@ -72,7 +71,7 @@ private void test(SplittableRandom rng, double samplingProbability) {
}
@Test
- public void test() {
+ void test() {
// fix seed to get reproducible results
SplittableRandom random = new SplittableRandom(0);
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessorTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessorTest.java
index 476a31983..1415d0f6f 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessorTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentReservoirSamplingSpanProcessorTest.java
@@ -11,7 +11,6 @@
import static org.assertj.core.api.AssertionsForClassTypes.assertThatCode;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
import static org.awaitility.Awaitility.await;
-import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.argThat;
@@ -125,7 +124,7 @@ public CompletableResultCode shutdown() {
return CompletableResultCode.ofSuccess();
}
- public void reset() {
+ void reset() {
this.countDownLatch = new CountDownLatch(numberOfSpansToWaitFor);
}
}
@@ -566,8 +565,8 @@ private void testConsistentSampling(
String traceStateString =
spanData.getSpanContext().getTraceState().get(OtelTraceState.TRACE_STATE_KEY);
OtelTraceState traceState = OtelTraceState.parse(traceStateString);
- assertTrue(traceState.hasValidR());
- assertTrue(traceState.hasValidP());
+ assertThat(traceState.hasValidR()).isTrue();
+ assertThat(traceState.hasValidP()).isTrue();
observedPvalues.merge(traceState.getP(), 1L, Long::sum);
totalAdjustedCount += 1L << traceState.getP();
spanNameCounts.merge(spanData.getName(), 1L, Long::sum);
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentSamplerTest.java
index 082ac3068..1a61868c8 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/ConsistentSamplerTest.java
@@ -8,9 +8,7 @@
import static io.opentelemetry.contrib.sampler.consistent.OtelTraceState.getInvalidP;
import static io.opentelemetry.contrib.sampler.consistent.OtelTraceState.getInvalidR;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
@@ -32,45 +30,50 @@ class ConsistentSamplerTest {
@Test
void testGetSamplingRate() {
- assertThrows(
- IllegalArgumentException.class, () -> ConsistentSampler.getSamplingProbability(-1));
+ assertThatThrownBy(() -> ConsistentSampler.getSamplingProbability(-1))
+ .isInstanceOf(IllegalArgumentException.class);
for (int i = 0; i < OtelTraceState.getMaxP() - 1; i += 1) {
- assertEquals(Math.pow(0.5, i), ConsistentSampler.getSamplingProbability(i));
+ assertThat(ConsistentSampler.getSamplingProbability(i)).isEqualTo(Math.pow(0.5, i));
}
- assertEquals(0., ConsistentSampler.getSamplingProbability(OtelTraceState.getMaxP()));
- assertThrows(
- IllegalArgumentException.class,
- () -> ConsistentSampler.getSamplingProbability(OtelTraceState.getMaxP() + 1));
+ assertThat(ConsistentSampler.getSamplingProbability(OtelTraceState.getMaxP())).isEqualTo(0.);
+ assertThatThrownBy(() -> ConsistentSampler.getSamplingProbability(OtelTraceState.getMaxP() + 1))
+ .isInstanceOf(IllegalArgumentException.class);
}
@Test
void testGetLowerBoundP() {
- assertEquals(0, ConsistentSampler.getLowerBoundP(1.0));
- assertEquals(0, ConsistentSampler.getLowerBoundP(Math.nextDown(1.0)));
+ assertThat(ConsistentSampler.getLowerBoundP(1.0)).isEqualTo(0);
+ assertThat(ConsistentSampler.getLowerBoundP(Math.nextDown(1.0))).isEqualTo(0);
for (int i = 1; i < OtelTraceState.getMaxP() - 1; i += 1) {
double samplingProbability = Math.pow(0.5, i);
- assertEquals(i, ConsistentSampler.getLowerBoundP(samplingProbability));
- assertEquals(i - 1, ConsistentSampler.getLowerBoundP(Math.nextUp(samplingProbability)));
- assertEquals(i, ConsistentSampler.getLowerBoundP(Math.nextDown(samplingProbability)));
+ assertThat(ConsistentSampler.getLowerBoundP(samplingProbability)).isEqualTo(i);
+ assertThat(ConsistentSampler.getLowerBoundP(Math.nextUp(samplingProbability)))
+ .isEqualTo(i - 1);
+ assertThat(ConsistentSampler.getLowerBoundP(Math.nextDown(samplingProbability))).isEqualTo(i);
}
- assertEquals(OtelTraceState.getMaxP() - 1, ConsistentSampler.getLowerBoundP(Double.MIN_NORMAL));
- assertEquals(OtelTraceState.getMaxP() - 1, ConsistentSampler.getLowerBoundP(Double.MIN_VALUE));
- assertEquals(OtelTraceState.getMaxP(), ConsistentSampler.getLowerBoundP(0.0));
+ assertThat(ConsistentSampler.getLowerBoundP(Double.MIN_NORMAL))
+ .isEqualTo(OtelTraceState.getMaxP() - 1);
+ assertThat(ConsistentSampler.getLowerBoundP(Double.MIN_VALUE))
+ .isEqualTo(OtelTraceState.getMaxP() - 1);
+ assertThat(ConsistentSampler.getLowerBoundP(0.0)).isEqualTo(OtelTraceState.getMaxP());
}
@Test
void testGetUpperBoundP() {
- assertEquals(0, ConsistentSampler.getUpperBoundP(1.0));
- assertEquals(1, ConsistentSampler.getUpperBoundP(Math.nextDown(1.0)));
+ assertThat(ConsistentSampler.getUpperBoundP(1.0)).isEqualTo(0);
+ assertThat(ConsistentSampler.getUpperBoundP(Math.nextDown(1.0))).isEqualTo(1);
for (int i = 1; i < OtelTraceState.getMaxP() - 1; i += 1) {
double samplingProbability = Math.pow(0.5, i);
- assertEquals(i, ConsistentSampler.getUpperBoundP(samplingProbability));
- assertEquals(i, ConsistentSampler.getUpperBoundP(Math.nextUp(samplingProbability)));
- assertEquals(i + 1, ConsistentSampler.getUpperBoundP(Math.nextDown(samplingProbability)));
+ assertThat(ConsistentSampler.getUpperBoundP(samplingProbability)).isEqualTo(i);
+ assertThat(ConsistentSampler.getUpperBoundP(Math.nextUp(samplingProbability))).isEqualTo(i);
+ assertThat(ConsistentSampler.getUpperBoundP(Math.nextDown(samplingProbability)))
+ .isEqualTo(i + 1);
}
- assertEquals(OtelTraceState.getMaxP(), ConsistentSampler.getUpperBoundP(Double.MIN_NORMAL));
- assertEquals(OtelTraceState.getMaxP(), ConsistentSampler.getUpperBoundP(Double.MIN_VALUE));
- assertEquals(OtelTraceState.getMaxP(), ConsistentSampler.getUpperBoundP(0.0));
+ assertThat(ConsistentSampler.getUpperBoundP(Double.MIN_NORMAL))
+ .isEqualTo(OtelTraceState.getMaxP());
+ assertThat(ConsistentSampler.getUpperBoundP(Double.MIN_VALUE))
+ .isEqualTo(OtelTraceState.getMaxP());
+ assertThat(ConsistentSampler.getUpperBoundP(0.0)).isEqualTo(OtelTraceState.getMaxP());
}
@Test
@@ -168,18 +171,18 @@ private static void assertConsistentSampling(
SamplingResult samplingResult =
sampler.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks);
- assertEquals(expectSampled, getSampledFlag(samplingResult));
+ assertThat(getSampledFlag(samplingResult)).isEqualTo(expectSampled);
OptionalInt p = getP(samplingResult, parentContext);
if (OtelTraceState.isValidP(expectedP)) {
- assertEquals(expectedP, p.getAsInt());
+ assertThat(p.getAsInt()).isEqualTo(expectedP);
} else {
- assertFalse(p.isPresent());
+ assertThat(p.isPresent()).isFalse();
}
OptionalInt r = getR(samplingResult, parentContext);
if (OtelTraceState.isValidR(expectedR)) {
- assertEquals(expectedR, r.getAsInt());
+ assertThat(r.getAsInt()).isEqualTo(expectedR);
} else {
- assertFalse(r.isPresent());
+ assertThat(r.isPresent()).isFalse();
}
}
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/OtelTraceStateTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/OtelTraceStateTest.java
index a6fd85d47..fbb6b6dc7 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/OtelTraceStateTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/OtelTraceStateTest.java
@@ -5,75 +5,72 @@
package io.opentelemetry.contrib.sampler.consistent;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
-public class OtelTraceStateTest {
+class OtelTraceStateTest {
private static String getXString(int len) {
return Stream.generate(() -> "X").limit(len).collect(Collectors.joining());
}
@Test
- public void test() {
+ void test() {
- Assertions.assertEquals("", OtelTraceState.parse("").serialize());
- assertEquals("", OtelTraceState.parse("").serialize());
+ assertThat(OtelTraceState.parse("").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("").serialize()).isEqualTo("");
- assertEquals("", OtelTraceState.parse("a").serialize());
- assertEquals("", OtelTraceState.parse("#").serialize());
- assertEquals("", OtelTraceState.parse(" ").serialize());
+ assertThat(OtelTraceState.parse("a").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("#").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse(" ").serialize()).isEqualTo("");
- assertEquals("p:5", OtelTraceState.parse("p:5").serialize());
- assertEquals("p:63", OtelTraceState.parse("p:63").serialize());
- assertEquals("", OtelTraceState.parse("p:64").serialize());
- assertEquals("", OtelTraceState.parse("p:5;").serialize());
- assertEquals("", OtelTraceState.parse("p:99").serialize());
- assertEquals("", OtelTraceState.parse("p:").serialize());
- assertEquals("", OtelTraceState.parse("p:232").serialize());
- assertEquals("", OtelTraceState.parse("x;p:5").serialize());
- assertEquals("", OtelTraceState.parse("p:5;x").serialize());
- assertEquals("p:5;x:3", OtelTraceState.parse("x:3;p:5").serialize());
- assertEquals("p:5;x:3", OtelTraceState.parse("p:5;x:3").serialize());
- assertEquals("", OtelTraceState.parse("p:5;x:3;").serialize());
- assertEquals(
- "p:5;a:" + getXString(246) + ";x:3",
- OtelTraceState.parse("a:" + getXString(246) + ";p:5;x:3").serialize());
- assertEquals("", OtelTraceState.parse("a:" + getXString(247) + ";p:5;x:3").serialize());
+ assertThat(OtelTraceState.parse("p:5").serialize()).isEqualTo("p:5");
+ assertThat(OtelTraceState.parse("p:63").serialize()).isEqualTo("p:63");
+ assertThat(OtelTraceState.parse("p:64").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("p:5;").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("p:99").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("p:").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("p:232").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("x;p:5").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("p:5;x").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("x:3;p:5").serialize()).isEqualTo("p:5;x:3");
+ assertThat(OtelTraceState.parse("p:5;x:3").serialize()).isEqualTo("p:5;x:3");
+ assertThat(OtelTraceState.parse("p:5;x:3;").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("a:" + getXString(246) + ";p:5;x:3").serialize())
+ .isEqualTo("p:5;a:" + getXString(246) + ";x:3");
+ assertThat(OtelTraceState.parse("a:" + getXString(247) + ";p:5;x:3").serialize()).isEqualTo("");
- assertEquals("r:5", OtelTraceState.parse("r:5").serialize());
- assertEquals("r:62", OtelTraceState.parse("r:62").serialize());
- assertEquals("", OtelTraceState.parse("r:63").serialize());
- assertEquals("", OtelTraceState.parse("r:5;").serialize());
- assertEquals("", OtelTraceState.parse("r:99").serialize());
- assertEquals("", OtelTraceState.parse("r:").serialize());
- assertEquals("", OtelTraceState.parse("r:232").serialize());
- assertEquals("", OtelTraceState.parse("x;r:5").serialize());
- assertEquals("", OtelTraceState.parse("r:5;x").serialize());
- assertEquals("r:5;x:3", OtelTraceState.parse("x:3;r:5").serialize());
- assertEquals("r:5;x:3", OtelTraceState.parse("r:5;x:3").serialize());
- assertEquals("", OtelTraceState.parse("r:5;x:3;").serialize());
- assertEquals(
- "r:5;a:" + getXString(246) + ";x:3",
- OtelTraceState.parse("a:" + getXString(246) + ";r:5;x:3").serialize());
- assertEquals("", OtelTraceState.parse("a:" + getXString(247) + ";r:5;x:3").serialize());
+ assertThat(OtelTraceState.parse("r:5").serialize()).isEqualTo("r:5");
+ assertThat(OtelTraceState.parse("r:62").serialize()).isEqualTo("r:62");
+ assertThat(OtelTraceState.parse("r:63").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("r:5;").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("r:99").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("r:").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("r:232").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("x;r:5").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("r:5;x").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("x:3;r:5").serialize()).isEqualTo("r:5;x:3");
+ assertThat(OtelTraceState.parse("r:5;x:3").serialize()).isEqualTo("r:5;x:3");
+ assertThat(OtelTraceState.parse("r:5;x:3;").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("a:" + getXString(246) + ";r:5;x:3").serialize())
+ .isEqualTo("r:5;a:" + getXString(246) + ";x:3");
+ assertThat(OtelTraceState.parse("a:" + getXString(247) + ";r:5;x:3").serialize()).isEqualTo("");
- assertEquals("p:7;r:5", OtelTraceState.parse("r:5;p:7").serialize());
- assertEquals("p:4;r:5", OtelTraceState.parse("r:5;p:4").serialize());
- assertEquals("p:7;r:5", OtelTraceState.parse("r:5;p:7").serialize());
- assertEquals("p:4;r:5", OtelTraceState.parse("r:5;p:4").serialize());
+ assertThat(OtelTraceState.parse("r:5;p:7").serialize()).isEqualTo("p:7;r:5");
+ assertThat(OtelTraceState.parse("r:5;p:4").serialize()).isEqualTo("p:4;r:5");
+ assertThat(OtelTraceState.parse("r:5;p:7").serialize()).isEqualTo("p:7;r:5");
+ assertThat(OtelTraceState.parse("r:5;p:4").serialize()).isEqualTo("p:4;r:5");
- assertEquals("r:6", OtelTraceState.parse("r:5;r:6").serialize());
- assertEquals("p:6;r:10", OtelTraceState.parse("p:5;p:6;r:10").serialize());
- assertEquals("", OtelTraceState.parse("p5;p:6;r:10").serialize());
- assertEquals("p:6;r:10;p5:3", OtelTraceState.parse("p5:3;p:6;r:10").serialize());
- assertEquals("", OtelTraceState.parse(":p:6;r:10").serialize());
- assertEquals("", OtelTraceState.parse(";p:6;r:10").serialize());
- assertEquals("", OtelTraceState.parse("_;p:6;r:10").serialize());
- assertEquals("", OtelTraceState.parse("5;p:6;r:10").serialize());
+ assertThat(OtelTraceState.parse("r:5;r:6").serialize()).isEqualTo("r:6");
+ assertThat(OtelTraceState.parse("p:5;p:6;r:10").serialize()).isEqualTo("p:6;r:10");
+ assertThat(OtelTraceState.parse("p5;p:6;r:10").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("p5:3;p:6;r:10").serialize()).isEqualTo("p:6;r:10;p5:3");
+ assertThat(OtelTraceState.parse(":p:6;r:10").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse(";p:6;r:10").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("_;p:6;r:10").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("5;p:6;r:10").serialize()).isEqualTo("");
}
}
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/RandomGeneratorTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/RandomGeneratorTest.java
index f94e7eef4..e2f336727 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/RandomGeneratorTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent/RandomGeneratorTest.java
@@ -14,7 +14,7 @@
import org.hipparchus.stat.inference.GTest;
import org.junit.jupiter.api.Test;
-public class RandomGeneratorTest {
+class RandomGeneratorTest {
private static void testGenerateRandomBitSet(long seed, int numBits, int numOneBits) {
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOffSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOffSamplerTest.java
index 9b5fc050b..d0425aa0b 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOffSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOffSamplerTest.java
@@ -10,7 +10,7 @@
import org.junit.jupiter.api.Test;
-public class ConsistentAlwaysOffSamplerTest {
+class ConsistentAlwaysOffSamplerTest {
@Test
void testDescription() {
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOnSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOnSamplerTest.java
index 3a6b8531b..115c39c41 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOnSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentAlwaysOnSamplerTest.java
@@ -10,7 +10,7 @@
import org.junit.jupiter.api.Test;
-public class ConsistentAlwaysOnSamplerTest {
+class ConsistentAlwaysOnSamplerTest {
@Test
void testDescription() {
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSamplerTest.java
index 7eac3ffb1..3d78de81a 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentFixedThresholdSamplerTest.java
@@ -25,7 +25,7 @@
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
-public class ConsistentFixedThresholdSamplerTest {
+class ConsistentFixedThresholdSamplerTest {
private Context parentContext;
private String name;
@@ -75,7 +75,7 @@ private void testSampling(SplittableRandom rng, double samplingProbability) {
}
@Test
- public void testSampling() {
+ void testSampling() {
// fix seed to get reproducible results
SplittableRandom random = new SplittableRandom(0);
@@ -92,7 +92,7 @@ public void testSampling() {
}
@Test
- public void testDescription() {
+ void testDescription() {
assertThat(ConsistentSampler.probabilityBased(1.0).getDescription())
.isEqualTo("ConsistentFixedThresholdSampler{threshold=0, sampling probability=1.0}");
assertThat(ConsistentSampler.probabilityBased(0.5).getDescription())
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSamplerTest.java
index cc56df1ef..d5cb6b640 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentRateLimitingSamplerTest.java
@@ -10,7 +10,11 @@
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.trace.Span;
+import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.SpanKind;
+import io.opentelemetry.api.trace.TraceFlags;
+import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.trace.data.LinkData;
import io.opentelemetry.sdk.trace.samplers.SamplingDecision;
@@ -357,6 +361,200 @@ void testProportionalBehavior() {
.isCloseTo(targetSpansPerSecondLimit, Percentage.withPercentage(5));
}
+ @Test
+ void testUnstableDelegate() {
+ // Assume there are 10,000 spans/s and the delegate samples 50% of them with probability 100%,
+ // and unconditionally rejects the rest.
+ //
+ // Now, if we do not want to sample more than 1000 spans/s overall, the rate limiting
+ // sampler should calculate the effective threshold correctly, i.e. end up sampling
+ // roughly 1000/5000 = 20% of the spans accepted by the delegate.
+
+ double targetSpansPerSecondLimit = 1000;
+ double adaptationTimeSeconds = 5;
+
+ Composable delegate =
+ new CoinFlipSampler(ConsistentSampler.alwaysOff(), ConsistentSampler.alwaysOn());
+
+ ConsistentSampler sampler =
+ ConsistentSampler.rateLimited(
+ delegate, targetSpansPerSecondLimit, adaptationTimeSeconds, nanoTimeSupplier);
+
+ long averageRequestRatePerSecond = 10000;
+ int numSpans = 1000000;
+
+ List<Long> spanSampledNanos = new ArrayList<>();
+
+ for (int i = 0; i < numSpans; ++i) {
+ advanceTime(randomInterval(averageRequestRatePerSecond));
+ SamplingResult samplingResult =
+ sampler.shouldSample(
+ parentContext,
+ generateRandomTraceId(random),
+ name,
+ spanKind,
+ attributes,
+ parentLinks);
+ if (SamplingDecision.RECORD_AND_SAMPLE.equals(samplingResult.getDecision())) {
+ spanSampledNanos.add(getCurrentTimeNanos());
+ }
+ }
+
+ long timeNow = nanoTime[0];
+ long numSampledSpansInLast5Seconds =
+ spanSampledNanos.stream().filter(x -> x > timeNow - 5000000000L && x <= timeNow).count();
+
+ assertThat(numSampledSpansInLast5Seconds / 5.)
+ .isCloseTo(targetSpansPerSecondLimit, Percentage.withPercentage(5));
+ }
+
+ @Test
+ void testLegacyCase() {
+ // This test makes sure that the issue
+ // https://github.com/open-telemetry/opentelemetry-java-contrib/issues/2007
+ // is resolved.
+
+ long averageRequestRatePerSecond = 10000;
+
+ // Assume the following setup:
+ // The root span is sampled by the legacy sampler AlwaysOn.
+ // One of its descendant spans, which we will call the "parent" span, is sampled with
+ // stage1: ConsistentRateLimitingSampler(ConsistentParentBasedSampler, 5000/s).
+ // This will sample approximately 50% of the spans.
+
+ // Its "child" is similarly sampled by
+ // stage2: ConsistentRateLimitingSampler(ConsistentParentBasedSampler, 2500/s).
+
+ // The mock sampler below will generate the same output as the legacy-sampled root span described above:
+ // - the threshold will be 0, so all spans will be sampled
+ // - isAdjustedCountReliable will be false
+ // - there will be no threshold in TraceState, but the sampling flag will be set
+ Composable mockRootSampler = new LegacyLikeComposable(ConsistentSampler.alwaysOn());
+
+ double targetSpansPerSecondLimit = 2500; // for stage2
+ double adaptationTimeSeconds = 5;
+
+ // The sampler for "parent" spans
+ ConsistentSampler stage1 =
+ ConsistentSampler.rateLimited(
+ mockRootSampler,
+ 2 * targetSpansPerSecondLimit,
+ adaptationTimeSeconds,
+ nanoTimeSupplier);
+
+ // The sampler for "child" spans (it will never see root spans)
+ ConsistentSampler stage2 =
+ ConsistentSampler.rateLimited(
+ ConsistentSampler.parentBased(ConsistentSampler.alwaysOff()),
+ targetSpansPerSecondLimit,
+ adaptationTimeSeconds,
+ nanoTimeSupplier);
+
+ int numSpans = 1000000;
+ int stage1SampledCount = 0;
+ int stage2SampledCount = 0;
+
+ for (int i = 0; i < numSpans; ++i) {
+ advanceTime(randomInterval(averageRequestRatePerSecond));
+ String traceId = generateRandomTraceId(random);
+
+ // Stage 1 sampling, the "parent"
+ SamplingResult samplingResult1 =
+ stage1.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks);
+
+ boolean isSampled = SamplingDecision.RECORD_AND_SAMPLE.equals(samplingResult1.getDecision());
+ if (isSampled) {
+ stage1SampledCount++;
+ }
+
+ // Prepare the context for the child span, pass parent's TraceState to the child
+ Span parentSpan = Span.fromContext(parentContext);
+ SpanContext parentSpanContext = parentSpan.getSpanContext();
+ TraceState parentSamplingTraceState =
+ samplingResult1.getUpdatedTraceState(parentSpanContext.getTraceState());
+
+ SpanContext childSpanContext =
+ SpanContext.create(
+ traceId,
+ "1000badbadbad000",
+ isSampled ? TraceFlags.getSampled() : TraceFlags.getDefault(),
+ parentSamplingTraceState);
+ Span childSpan = Span.wrap(childSpanContext);
+ Context childContext = childSpan.storeInContext(parentContext);
+
+ // Stage 2 sampling, the "child"
+ SamplingResult samplingResult2 =
+ stage2.shouldSample(childContext, traceId, name, spanKind, attributes, parentLinks);
+
+ if (SamplingDecision.RECORD_AND_SAMPLE.equals(samplingResult2.getDecision())) {
+ stage2SampledCount++;
+ }
+ }
+
+ long timeNow = nanoTime[0];
+ double duration = timeNow / 1000000000.0; // in seconds
+ assertThat(duration)
+ .isCloseTo(numSpans / (double) averageRequestRatePerSecond, Percentage.withPercentage(2));
+
+ assertThat(stage1SampledCount / duration)
+ .isCloseTo(2 * targetSpansPerSecondLimit, Percentage.withPercentage(2));
+
+ assertThat(stage2SampledCount / duration)
+ .isCloseTo(targetSpansPerSecondLimit, Percentage.withPercentage(2));
+ }
+
+ /*
+ * An auxiliary class that simulates the behavior of a legacy (non-consistent-probability)
+ * sampler, used only for testing mixed environments
+ */
+ static class LegacyLikeComposable implements Composable {
+
+ private final Composable delegate;
+
+ public LegacyLikeComposable(Composable delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public SamplingIntent getSamplingIntent(
+ Context parentContext,
+ String name,
+ SpanKind spanKind,
+ Attributes attributes,
+ List<LinkData> parentLinks) {
+
+ SamplingIntent delegateIntent =
+ delegate.getSamplingIntent(parentContext, name, spanKind, attributes, parentLinks);
+
+ return new SamplingIntent() {
+ @Override
+ public long getThreshold() {
+ return delegateIntent.getThreshold();
+ }
+
+ @Override
+ public boolean isAdjustedCountReliable() {
+ // Forcing "legacy" behavior, no threshold will be put into TraceState
+ return false;
+ }
+
+ @Override
+ public Attributes getAttributes() {
+ return delegateIntent.getAttributes();
+ }
+
+ @Override
+ public TraceState updateTraceState(TraceState previousState) {
+ return delegateIntent.updateTraceState(previousState);
+ }
+ };
+ }
+
+ @Override
+ public String getDescription() {
+ return "LegacyLike(" + delegate.getDescription() + ")";
+ }
+ }
+
@Test
void testDescription() {
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplerTest.java
index a246e248f..7725bb57a 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplerTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplerTest.java
@@ -37,42 +37,42 @@ private static class Input {
private OptionalLong parentThreshold = OptionalLong.empty();
private OptionalLong parentRandomValue = OptionalLong.empty();
- public void setParentSampled(boolean parentSampled) {
+ void setParentSampled(boolean parentSampled) {
this.parentSampled = parentSampled;
}
- public void setParentThreshold(long parentThreshold) {
+ void setParentThreshold(long parentThreshold) {
assertThat(parentThreshold).isBetween(0L, 0xffffffffffffffL);
this.parentThreshold = OptionalLong.of(parentThreshold);
}
- public void setParentRandomValue(long parentRandomValue) {
+ void setParentRandomValue(long parentRandomValue) {
assertThat(parentRandomValue).isBetween(0L, 0xffffffffffffffL);
this.parentRandomValue = OptionalLong.of(parentRandomValue);
}
- public Context getParentContext() {
+ Context getParentContext() {
return createParentContext(
traceId, spanId, parentThreshold, parentRandomValue, parentSampled);
}
- public static String getTraceId() {
+ static String getTraceId() {
return traceId;
}
- public static String getName() {
+ static String getName() {
return name;
}
- public static SpanKind getSpanKind() {
+ static SpanKind getSpanKind() {
return spanKind;
}
- public static Attributes getAttributes() {
+ static Attributes getAttributes() {
return attributes;
}
- public static List<LinkData> getParentLinks() {
+ static List<LinkData> getParentLinks() {
return parentLinks;
}
}
@@ -87,10 +87,6 @@ private static class Output {
this.parentContext = parentContext;
}
- boolean getSampledFlag() {
- return SamplingDecision.RECORD_AND_SAMPLE.equals(samplingResult.getDecision());
- }
-
OptionalLong getThreshold() {
Span parentSpan = Span.fromContext(parentContext);
OtelTraceState otelTraceState =
@@ -163,7 +159,6 @@ void testMinThresholdWithoutParentRandomValue() {
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.RECORD_AND_SAMPLE);
assertThat(output.getThreshold()).hasValue(0);
assertThat(output.getRandomValue()).isNotPresent();
- assertThat(output.getSampledFlag()).isTrue();
}
@Test
@@ -181,7 +176,6 @@ void testMinThresholdWithParentRandomValue() {
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.RECORD_AND_SAMPLE);
assertThat(output.getThreshold()).hasValue(0);
assertThat(output.getRandomValue()).hasValue(parentRandomValue);
- assertThat(output.getSampledFlag()).isTrue();
}
@Test
@@ -194,9 +188,8 @@ void testMaxThreshold() {
Output output = sample(input, sampler);
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.DROP);
- assertThat(output.getThreshold()).isEmpty();
+ assertThat(output.getThreshold()).isNotPresent();
assertThat(output.getRandomValue()).isNotPresent();
- assertThat(output.getSampledFlag()).isFalse();
}
@Test
@@ -216,7 +209,6 @@ void testParentBasedInConsistentMode() {
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.RECORD_AND_SAMPLE);
assertThat(output.getThreshold()).hasValue(parentRandomValue);
assertThat(output.getRandomValue()).hasValue(parentRandomValue);
- assertThat(output.getSampledFlag()).isTrue();
}
@Test
@@ -232,7 +224,6 @@ void testParentBasedInLegacyMode() {
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.RECORD_AND_SAMPLE);
assertThat(output.getThreshold()).isNotPresent();
assertThat(output.getRandomValue()).isNotPresent();
- assertThat(output.getSampledFlag()).isTrue();
}
@Test
@@ -248,7 +239,6 @@ void testHalfThresholdNotSampled() {
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.DROP);
assertThat(output.getThreshold()).isNotPresent();
assertThat(output.getRandomValue()).hasValue(0x7FFFFFFFFFFFFFL);
- assertThat(output.getSampledFlag()).isFalse();
}
@Test
@@ -264,7 +254,6 @@ void testHalfThresholdSampled() {
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.RECORD_AND_SAMPLE);
assertThat(output.getThreshold()).hasValue(0x80000000000000L);
assertThat(output.getRandomValue()).hasValue(0x80000000000000L);
- assertThat(output.getSampledFlag()).isTrue();
}
@Test
@@ -279,9 +268,7 @@ void testParentViolatingInvariant() {
Output output = sample(input, sampler);
assertThat(output.samplingResult.getDecision()).isEqualTo(SamplingDecision.RECORD_AND_SAMPLE);
-
assertThat(output.getThreshold()).hasValue(0x0L);
assertThat(output.getRandomValue()).hasValue(0x80000000000000L);
- assertThat(output.getSampledFlag()).isTrue();
}
}
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplingUtilTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplingUtilTest.java
index fcf2dcd8d..d612f9e0a 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplingUtilTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentSamplingUtilTest.java
@@ -19,7 +19,7 @@
import org.junit.jupiter.api.Test;
-public class ConsistentSamplingUtilTest {
+class ConsistentSamplingUtilTest {
@Test
void testCalculateSamplingProbability() {
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentVariableThresholdSamplerTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentVariableThresholdSamplerTest.java
new file mode 100644
index 000000000..90428fad9
--- /dev/null
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/ConsistentVariableThresholdSamplerTest.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.sampler.consistent56;
+
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.calculateThreshold;
+import static io.opentelemetry.contrib.sampler.consistent56.ConsistentSamplingUtil.getMaxThreshold;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.junit.jupiter.api.Test;
+
+class ConsistentVariableThresholdSamplerTest {
+
+ @Test
+ void testSetSamplingProbability() {
+ double probability = 0.5;
+ ConsistentVariableThresholdSampler sampler =
+ new ConsistentVariableThresholdSampler(probability);
+ testSetSamplingProbability(probability, sampler, /* updateProbability= */ false);
+ testSetSamplingProbability(0.25, sampler, /* updateProbability= */ true);
+ testSetSamplingProbability(0.0, sampler, /* updateProbability= */ true);
+ testSetSamplingProbability(1.0, sampler, /* updateProbability= */ true);
+ }
+
+ private static void testSetSamplingProbability(
+ double probability, ConsistentVariableThresholdSampler sampler, boolean updateProbability) {
+ long threshold = calculateThreshold(probability);
+ String thresholdString =
+ ConsistentSamplingUtil.appendLast56BitHexEncodedWithoutTrailingZeros(
+ new StringBuilder(), threshold)
+ .toString();
+ if (threshold == getMaxThreshold()) {
+ thresholdString = "max";
+ }
+ if (updateProbability) {
+ sampler.setSamplingProbability(probability);
+ }
+ assertThat(sampler.getThreshold()).isEqualTo(threshold);
+ assertThat(sampler.getDescription())
+ .isEqualTo(
+ "ConsistentVariableThresholdSampler{threshold="
+ + thresholdString
+ + ", sampling probability="
+ + probability
+ + "}");
+ }
+}
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/OtelTraceStateTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/OtelTraceStateTest.java
index a131e9b78..8e8b3ef96 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/OtelTraceStateTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/OtelTraceStateTest.java
@@ -5,71 +5,79 @@
package io.opentelemetry.contrib.sampler.consistent56;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
-public class OtelTraceStateTest {
+class OtelTraceStateTest {
private static String getXString(int len) {
return Stream.generate(() -> "X").limit(len).collect(Collectors.joining());
}
@Test
- public void test() {
+ void test() {
- assertEquals("", OtelTraceState.parse("").serialize());
- assertEquals("", OtelTraceState.parse("").serialize());
+ assertThat(OtelTraceState.parse("").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("").serialize()).isEqualTo("");
- assertEquals("", OtelTraceState.parse("a").serialize());
- assertEquals("", OtelTraceState.parse("#").serialize());
- assertEquals("", OtelTraceState.parse(" ").serialize());
+ assertThat(OtelTraceState.parse("a").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("#").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse(" ").serialize()).isEqualTo("");
- assertEquals("rv:1234567890abcd", OtelTraceState.parse("rv:1234567890abcd").serialize());
- assertEquals("rv:01020304050607", OtelTraceState.parse("rv:01020304050607").serialize());
- assertEquals("", OtelTraceState.parse("rv:1234567890abcde").serialize());
+ assertThat(OtelTraceState.parse("rv:1234567890abcd").serialize())
+ .isEqualTo("rv:1234567890abcd");
+ assertThat(OtelTraceState.parse("rv:01020304050607").serialize())
+ .isEqualTo("rv:01020304050607");
+ assertThat(OtelTraceState.parse("rv:1234567890abcde").serialize()).isEqualTo("");
- assertEquals("th:1234567890abcd", OtelTraceState.parse("th:1234567890abcd").serialize());
- assertEquals("th:01020304050607", OtelTraceState.parse("th:01020304050607").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:10000000000000").serialize());
- assertEquals("th:12345", OtelTraceState.parse("th:1234500000000").serialize());
- assertEquals("th:0", OtelTraceState.parse("th:0").serialize()); // TODO
- assertEquals("", OtelTraceState.parse("th:100000000000000").serialize());
- assertEquals("", OtelTraceState.parse("th:1234567890abcde").serialize());
+ assertThat(OtelTraceState.parse("th:1234567890abcd").serialize())
+ .isEqualTo("th:1234567890abcd");
+ assertThat(OtelTraceState.parse("th:01020304050607").serialize())
+ .isEqualTo("th:01020304050607");
+ assertThat(OtelTraceState.parse("th:10000000000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:1234500000000").serialize()).isEqualTo("th:12345");
+ assertThat(OtelTraceState.parse("th:0").serialize()).isEqualTo("th:0"); // TODO
+ assertThat(OtelTraceState.parse("th:100000000000000").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("th:1234567890abcde").serialize()).isEqualTo("");
- assertEquals(
- "th:1234567890abcd;rv:1234567890abcd;a:" + getXString(214) + ";x:3",
- OtelTraceState.parse("a:" + getXString(214) + ";rv:1234567890abcd;th:1234567890abcd;x:3")
- .serialize());
- assertEquals(
- "",
- OtelTraceState.parse("a:" + getXString(215) + ";rv:1234567890abcd;th:1234567890abcd;x:3")
- .serialize());
+ assertThat(
+ OtelTraceState.parse(
+ "a:" + getXString(214) + ";rv:1234567890abcd;th:1234567890abcd;x:3")
+ .serialize())
+ .isEqualTo("th:1234567890abcd;rv:1234567890abcd;a:" + getXString(214) + ";x:3");
+ assertThat(
+ OtelTraceState.parse(
+ "a:" + getXString(215) + ";rv:1234567890abcd;th:1234567890abcd;x:3")
+ .serialize())
+ .isEqualTo("");
- assertEquals("", OtelTraceState.parse("th:x").serialize());
- assertEquals("", OtelTraceState.parse("th:100000000000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:10000000000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:1000000000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:100000000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:10000000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:1000000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:100000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:10000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:1000000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:100000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:10000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:1000").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:100").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:10").serialize());
- assertEquals("th:1", OtelTraceState.parse("th:1").serialize());
+ assertThat(OtelTraceState.parse("th:x").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("th:100000000000000").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("th:10000000000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:1000000000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:100000000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:10000000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:1000000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:100000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:10000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:1000000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:100000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:10000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:1000").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:100").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:10").serialize()).isEqualTo("th:1");
+ assertThat(OtelTraceState.parse("th:1").serialize()).isEqualTo("th:1");
- assertEquals("th:10000000000001", OtelTraceState.parse("th:10000000000001").serialize());
- assertEquals("th:1000000000001", OtelTraceState.parse("th:10000000000010").serialize());
- assertEquals("", OtelTraceState.parse("rv:x").serialize());
- assertEquals("", OtelTraceState.parse("rv:100000000000000").serialize());
- assertEquals("rv:10000000000000", OtelTraceState.parse("rv:10000000000000").serialize());
- assertEquals("", OtelTraceState.parse("rv:1000000000000").serialize());
+ assertThat(OtelTraceState.parse("th:10000000000001").serialize())
+ .isEqualTo("th:10000000000001");
+ assertThat(OtelTraceState.parse("th:10000000000010").serialize()).isEqualTo("th:1000000000001");
+ assertThat(OtelTraceState.parse("rv:x").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("rv:100000000000000").serialize()).isEqualTo("");
+ assertThat(OtelTraceState.parse("rv:10000000000000").serialize())
+ .isEqualTo("rv:10000000000000");
+ assertThat(OtelTraceState.parse("rv:1000000000000").serialize()).isEqualTo("");
}
}
diff --git a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/RandomValueGeneratorsTest.java b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/RandomValueGeneratorsTest.java
index ab7d378b6..d9a34255f 100644
--- a/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/RandomValueGeneratorsTest.java
+++ b/consistent-sampling/src/test/java/io/opentelemetry/contrib/sampler/consistent56/RandomValueGeneratorsTest.java
@@ -10,7 +10,7 @@
import org.junit.jupiter.api.Test;
-public class RandomValueGeneratorsTest {
+class RandomValueGeneratorsTest {
@Test
void testRandomRange() {
int attempts = 10000;
diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts
index 5739a81f9..2221dfd27 100644
--- a/dependencyManagement/build.gradle.kts
+++ b/dependencyManagement/build.gradle.kts
@@ -2,8 +2,8 @@ plugins {
`java-platform`
}
-val otelInstrumentationVersion = "2.15.0-alpha"
-val semconvVersion = "1.32.0"
+val otelInstrumentationVersion = "2.20.1-alpha"
+val semconvVersion = "1.37.0"
javaPlatform {
allowDependencies()
@@ -14,21 +14,23 @@ dependencies {
// under JvmTestSuite so they don't show up as runtime dependencies in license and vulnerability scans
// (the constraints section below doesn't have this issue, and will only show up
// as runtime dependencies if they are actually used as runtime dependencies)
- api(enforcedPlatform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:${otelInstrumentationVersion}"))
- api(enforcedPlatform("com.fasterxml.jackson:jackson-bom:2.18.3"))
+ api(platform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:${otelInstrumentationVersion}"))
+ api(platform("com.fasterxml.jackson:jackson-bom:2.20.0"))
+ api(platform("com.google.protobuf:protobuf-bom:4.32.1"))
+ api(platform("com.squareup.okhttp3:okhttp-bom:5.1.0"))
constraints {
api("io.opentelemetry.semconv:opentelemetry-semconv:${semconvVersion}")
- api("io.opentelemetry.semconv:opentelemetry-semconv-incubating:${semconvVersion}")
+ api("io.opentelemetry.semconv:opentelemetry-semconv-incubating:${semconvVersion}-alpha")
api("com.google.auto.service:auto-service:1.1.1")
api("com.google.auto.service:auto-service-annotations:1.1.1")
api("com.google.auto.value:auto-value:1.11.0")
api("com.google.auto.value:auto-value-annotations:1.11.0")
- api("com.google.errorprone:error_prone_annotations:2.37.0")
- api("com.google.errorprone:error_prone_core:2.37.0")
+ api("com.google.errorprone:error_prone_annotations:2.42.0")
+ api("com.google.errorprone:error_prone_core:2.42.0")
api("io.github.netmikey.logunit:logunit-jul:2.0.0")
- api("io.opentelemetry.proto:opentelemetry-proto:1.5.0-alpha")
+ api("io.opentelemetry.proto:opentelemetry-proto:1.8.0-alpha")
api("io.prometheus:simpleclient:0.16.0")
api("io.prometheus:simpleclient_common:0.16.0")
api("io.prometheus:simpleclient_httpserver:0.16.0")
@@ -43,19 +45,19 @@ dependencies {
api("com.google.code.findbugs:annotations:3.0.1u2")
api("com.google.code.findbugs:jsr305:3.0.2")
- api("com.squareup.okhttp3:okhttp:4.12.0")
- api("com.uber.nullaway:nullaway:0.12.6")
- api("org.assertj:assertj-core:3.27.3")
+ api("com.uber.nullaway:nullaway:0.12.10")
+ api("org.assertj:assertj-core:3.27.6")
api("org.awaitility:awaitility:4.3.0")
api("org.bouncycastle:bcpkix-jdk15on:1.70")
api("org.junit-pioneer:junit-pioneer:1.9.1")
api("org.skyscreamer:jsonassert:1.5.3")
- api("org.apache.kafka:kafka-clients:3.9.0")
- api("org.testcontainers:kafka:1.20.6")
+ api("org.apache.kafka:kafka-clients:4.1.0")
+ api("org.testcontainers:kafka:1.21.3")
api("com.lmax:disruptor:3.4.4")
api("org.jctools:jctools-core:4.0.5")
- api("tools.profiler:async-profiler:3.0")
+ api("tools.profiler:async-profiler:4.1")
api("com.blogspot.mydailyjava:weak-lock-free:0.18")
api("org.agrona:agrona:1.22.0")
+ api("com.github.f4b6a3:uuid-creator:6.1.1")
}
}
diff --git a/disk-buffering/DESIGN.md b/disk-buffering/DESIGN.md
index 01f6048da..3bd1e3f01 100644
--- a/disk-buffering/DESIGN.md
+++ b/disk-buffering/DESIGN.md
@@ -1,59 +1,62 @@
# Design Overview
-There are three main disk-writing exporters provided by this module:
+The core of disk buffering
+is [SignalStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/SignalStorage.java).
+SignalStorage is an abstraction that defines the minimum functionality implementations need in
+order to write and read signals.
-* [LogRecordToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordToDiskExporter.java)
-* [MetricToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/MetricToDiskExporter.java)
-* [SpanToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/SpanToDiskExporter.java))
+There is a default implementation per signal that writes serialized signal items as
+protobuf-delimited messages into files, where each file's name is the timestamp of its creation.
+That timestamp is later used to determine when a file is ready to be read, as well as when it has
+expired. These implementations are the following:
-Each is responsible for writing a specific type of telemetry to disk storage for later
-harvest/ingest.
+* [FileSpanStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileSpanStorage.java)
+* [FileLogRecordStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileLogRecordStorage.java)
+* [FileMetricStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileMetricStorage.java)
-For later reading, there are:
+Each one has a `create()` method that takes a destination directory (to store data into) and an
+optional [FileStorageConfiguration](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileStorageConfiguration.java)
+for finer control over the storing behavior.
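+
+As a quick illustration (the directory paths below are placeholders), a dedicated storage can be
+created per signal:
+
+```java
+File rootDir = new File("/some/root");
+// Each storage implementation gets its own dedicated directory.
+SignalStorage.Span spanStorage = FileSpanStorage.create(new File(rootDir, "spans"));
+SignalStorage.LogRecord logStorage = FileLogRecordStorage.create(new File(rootDir, "logs"));
+SignalStorage.Metric metricStorage = FileMetricStorage.create(new File(rootDir, "metrics"));
+```
+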
-* [LogRecordFromToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordFromDiskExporter.java)
-* [MetricFromDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/MetricFromDiskExporter.java)
-* [SpanFromDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/SpanFromDiskExporter.java))
+Even
+though [SignalStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/SignalStorage.java)
+can receive signal items directly to be stored on disk, there are convenience exporter
+implementations for each signal that handle the storing process on your behalf. Those are the
+following:
-Each one of those has a `create()` method that takes a delegate exporter (to send data
-to ingest) and the `StorageConfiguration` that tells them where to find buffered data.
+* [SpanToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/SpanToDiskExporter.java)
+* [LogRecordToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/LogRecordToDiskExporter.java)
+* [MetricToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/MetricToDiskExporter.java)
-As explained in the [README](README.md), this has to be triggered manually by the consumer of
-this library and does not happen automatically.
+Each receives its
+respective [SignalStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/SignalStorage.java)
+object to delegate signals to, as well as an optional callback object that is notified of its
+operations.
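+
+For example, a minimal sketch of wiring a span storage into its exporter (the callback here is a
+user-provided `ExporterCallback` and is optional; the README shows the full setup):
+
+```java
+SpanToDiskExporter spanToDiskExporter =
+    SpanToDiskExporter.builder(spanStorage)
+        .setExporterCallback(mySpanCallback) // optional, user-provided ExporterCallback
+        .build();
+```
+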
## Writing overview

-* The writing process happens automatically within its `export(Collection signals)`
- method, which is called by the configured signal processor.
-* When a set of signals is received, these are delegated over to
- a type-specific wrapper of [ToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/exporter/ToDiskExporter.java)
- class which then serializes them using an implementation
- of [SignalSerializer](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/serialization/serializers/SignalSerializer.java)
- and then the serialized data is appended into a File using an instance of
- the [Storage](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java)
- class.
+* Via the convenience toDisk exporters, the writing process happens automatically within their
+ `export(Collection signals)` method, which is called by the configured signal
+ processor.
+* When a set of signals is received, they are delegated to a type-specific serializer
+ and the serialized data is then appended to a file.
* The data is written into a file directly, without the use of a buffer, to make sure no data gets
lost in case the application ends unexpectedly.
-* Each disk exporter stores its signals in its own folder, which is expected to contain files
+* Each signal storage stores its signals in its own folder, which is expected to contain files
that belong to that type of signal only.
* Each file may contain more than a batch of signals if the configuration parameters allow enough
limit size for it.
* If the configured folder size for the signals has been reached and a new file is needed to be
created to keep storing new data, the oldest available file will be removed to make space for the
new one.
-* The [Storage](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java),
- [FolderManager](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java)
- and [WritableFile](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/WritableFile.java)
- files contain more information on the details of the writing process into a file.
## Reading overview

-* The reading process has to be triggered manually by the library consumer as explained in
- the [README](README.md).
+* The reading process has to be triggered manually by the library consumer via the signal storage
+ iterator.
* A single file is read at a time and updated to remove the data gathered from it after it is
successfully exported, until it's emptied. Each file previously created during the
writing process has a timestamp in milliseconds, which is used to determine what file to start
@@ -62,9 +65,3 @@ this library and does not happen automatically.
the time of creating the disk exporter, then it will be ignored, and the next oldest (and
unexpired) one will be used instead.
* All the stale and empty files will be removed as a new file is created.
-* The [Storage](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/Storage.java),
- [FolderManager](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/FolderManager.java)
- and [ReadableFile](src/main/java/io/opentelemetry/contrib/disk/buffering/internal/storage/files/ReadableFile.java)
- files contain more information on the details of the file reading process.
-* Note that the reader delegates the data to the exporter exactly in the way it has received the
- data - it does not try to batch data (but this could be an optimization in the future).
diff --git a/disk-buffering/README.md b/disk-buffering/README.md
index 67dbb1f52..9178794ad 100644
--- a/disk-buffering/README.md
+++ b/disk-buffering/README.md
@@ -1,115 +1,132 @@
# Disk buffering
-This module provides exporters that store telemetry data in files which can be
-sent later on demand. A high level description of how it works is that there are two separate
-processes in place, one for writing data in disk, and one for reading/exporting the previously
-stored data.
+This module provides an abstraction
+named [SignalStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/SignalStorage.java),
+as well as default implementations for each signal type that allow writing signals to disk and
+reading them later.
-* Each exporter stores the received data automatically in disk right after it's received from its
- processor.
-* The reading of the data back from disk and exporting process has to be done manually. At
- the moment there's no automatic mechanism to do so. There's more information on how it can be
- achieved, under [Reading data](#reading-data).
+For more detailed information on how the whole process works, take a look at
+the [DESIGN.md](DESIGN.md) file.
-> For a more detailed information on how the whole process works, take a look at
-> the [DESIGN.md](DESIGN.md) file.
+## Default implementation usage
-## Configuration
+The default implementations are the following:
-The configurable parameters are provided **per exporter**, the available ones are:
+* [FileSpanStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileSpanStorage.java)
+* [FileLogRecordStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileLogRecordStorage.java)
+* [FileMetricStorage](src/main/java/io/opentelemetry/contrib/disk/buffering/storage/impl/FileMetricStorage.java)
-* Max file size, defaults to 1MB.
-* Max folder size, defaults to 10MB. All files are stored in a single folder per-signal, therefore
- if all 3 types of signals are stored, the total amount of space from disk to be taken by default
- would be of 30MB.
-* Max age for file writing, defaults to 30 seconds.
-* Min age for file reading, defaults to 33 seconds. It must be greater that the max age for file
- writing.
-* Max age for file reading, defaults to 18 hours. After that time passes, the file will be
- considered stale and will be removed when new files are created. No more data will be read from a
- file past this time.
-* An instance
- of [TemporaryFileProvider](src/main/java/io/opentelemetry/contrib/disk/buffering/config/TemporaryFileProvider.java),
- defaults to calling `File.createTempFile`. This provider will be used when reading from the disk
- in order create a temporary file from which each line (batch of signals) will be read and
- sequentially get removed from the original cache file right after the data has been successfully
- exported.
-
-## Usage
+### Set up
-### Storing data
+We need to create a signal storage object per signal type to start writing signals to disk. Each
+`File*Storage` implementation has a `create()` function that receives:
+
+* A File directory to store the signal files. Note that each signal storage object must have a
+ dedicated directory to work properly.
+* (Optional) a configuration object.
-In order to use it, you need to wrap your own exporter with a new instance of
-the ones provided in here:
+The available configuration parameters are the following:
-* For a LogRecordExporter, it must be wrapped within
- a [LogRecordToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordToDiskExporter.java).
-* For a MetricExporter, it must be wrapped within
- a [MetricToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/MetricToDiskExporter.java).
-* For a SpanExporter, it must be wrapped within
- a [SpanToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/SpanToDiskExporter.java).
+* Max file size, defaults to 1MB.
+* Max folder size, defaults to 10MB.
+* Max age for file writing. It sets the time window during which a file can have signals appended
+ to it. Defaults to 30 seconds.
+* Min age for file reading. It sets the time to wait before starting to read from a file after
+ its creation. Defaults to 33 seconds. It must be greater than the max age for file writing.
+* Max age for file reading. After that time passes, the file will be considered stale and will be
+ removed when new files are created. No more data will be read from a file past this time. Defaults
+ to 18 hours.
-Each wrapper will need the following when instantiating them:
+```java
+// Root dir
+File rootDir = new File("/some/root");
-* The exporter to be wrapped.
-* A File instance of the root directory where all the data is going to be written. The same root dir
- can be used for all the wrappers, since each will create their own folder inside it.
-* An instance
- of [StorageConfiguration](src/main/java/io/opentelemetry/contrib/disk/buffering/config/StorageConfiguration.java)
- with the desired parameters. You can create one with default values by
- calling `StorageConfiguration.getDefault()`.
+// Setting up span storage
+SignalStorage.Span spanStorage = FileSpanStorage.create(new File(rootDir, "spans"));
-After wrapping your exporters, you must register the wrapper as the exporter you'll use. It will
-take care of always storing the data it receives.
+// Setting up metric storage
+SignalStorage.Metric metricStorage = FileMetricStorage.create(new File(rootDir, "metrics"));
-#### Set up example for spans
+// Setting up log storage
+SignalStorage.LogRecord logStorage = FileLogRecordStorage.create(new File(rootDir, "logs"));
+```
-### Writing data
+### Storing data
-The data is written in the disk by "ToDisk" exporters, these are exporters that serialize and store the data as received by their processors. If for some reason
-the "ToDisk" cannot store data in the disk, they'll delegate the data to their wrapped exporter.
+While you could manually call your `SignalStorage.write(items)` function, disk buffering
+provides convenience exporters that you can use in your OpenTelemetry instance, so
+that all signals are automatically stored as they are created.
-```java
-// Creating the SpanExporter of our choice.
-SpanExporter mySpanExporter = OtlpGrpcSpanExporter.getDefault();
+* For a span storage, use
+ a [SpanToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/SpanToDiskExporter.java).
+* For a log storage, use
+ a [LogRecordToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/LogRecordToDiskExporter.java).
+* For a metric storage, use
+ a [MetricToDiskExporter](src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/MetricToDiskExporter.java).
-// Wrapping our exporter with its "ToDisk" exporter.
-SpanToDiskExporter toDiskExporter = SpanToDiskExporter.create(mySpanExporter, StorageConfiguration.getDefault(new File("/my/signals/cache/dir")));
+Each will wrap a signal storage for its respective signal type, as well as an optional callback
+that is notified when the exporter succeeds, fails, or shuts down.
- // Registering the disk exporter within our OpenTelemetry instance.
-SdkTracerProvider myTraceProvider = SdkTracerProvider.builder()
- .addSpanProcessor(SimpleSpanProcessor.create(toDiskExporter))
+```java
+// Setting up span to disk exporter
+SpanToDiskExporter spanToDiskExporter =
+ SpanToDiskExporter.builder(spanStorage).setExporterCallback(spanCallback).build();
+// Setting up metric to disk
+MetricToDiskExporter metricToDiskExporter =
+ MetricToDiskExporter.builder(metricStorage).setExporterCallback(metricCallback).build();
+// Setting up log to disk exporter
+LogRecordToDiskExporter logToDiskExporter =
+ LogRecordToDiskExporter.builder(logStorage).setExporterCallback(logCallback).build();
+
+// Using exporters in your OpenTelemetry instance.
+OpenTelemetry openTelemetry =
+ OpenTelemetrySdk.builder()
+ // Using span to disk exporter
+ .setTracerProvider(
+ SdkTracerProvider.builder()
+ .addSpanProcessor(BatchSpanProcessor.builder(spanToDiskExporter).build())
+ .build())
+ // Using log to disk exporter
+ .setLoggerProvider(
+ SdkLoggerProvider.builder()
+ .addLogRecordProcessor(
+ BatchLogRecordProcessor.builder(logToDiskExporter).build())
+ .build())
+ // Using metric to disk exporter
+ .setMeterProvider(
+ SdkMeterProvider.builder()
+ .registerMetricReader(PeriodicMetricReader.create(metricToDiskExporter))
+ .build())
.build();
-OpenTelemetrySdk.builder()
- .setTracerProvider(myTraceProvider)
- .buildAndRegisterGlobal();
-
```
+Now, when signals are created using your `OpenTelemetry` instance, they will be stored on disk.
+
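+The `spanCallback`, `metricCallback` and `logCallback` objects above are user-provided
+`ExporterCallback` implementations. The callback is optional, so a minimal setup can omit it:
+
+```java
+// Storage-only exporter with no callback and the default write timeout.
+SpanToDiskExporter minimalSpanExporter = SpanToDiskExporter.builder(spanStorage).build();
+```
+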
### Reading data
-In order to read data, we need to create "FromDisk" exporters, which read data from the disk, parse it and delegate it
-to their wrapped exporters.
+In order to read data, we can iterate over a signal storage object and forward each batch it
+returns to a network exporter, as shown in the example for spans below.
```java
-try {
- SpanFromDiskExporter fromDiskExporter = SpanFromDiskExporter.create(memorySpanExporter, storageConfig);
- if(fromDiskExporter.exportStoredBatch(1, TimeUnit.SECONDS)) {
- // A batch was successfully exported and removed from disk. You can call this method for as long as it keeps returning true.
- } else {
- // Either there was no data in the disk or the wrapped exporter returned CompletableResultCode.ofFailure().
- }
-} catch (IOException e) {
- // Something unexpected happened.
+// Example of reading and exporting spans from disk
+OtlpHttpSpanExporter networkExporter;
+Iterator<Collection<SpanData>> spanCollections = spanStorage.iterator();
+while (spanCollections.hasNext()) {
+ networkExporter.export(spanCollections.next());
}
```
+The `File*Storage` iterators delete the previously returned collection when `next()` is called,
+on the assumption that the next collection is only requested once the previous one has been
+successfully consumed.
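+
+Because of that, when delivery matters it is worth checking the export result before requesting
+the next batch. A sketch (the timeout value is arbitrary):
+
+```java
+Iterator<Collection<SpanData>> batches = spanStorage.iterator();
+while (batches.hasNext()) {
+  CompletableResultCode result = networkExporter.export(batches.next());
+  // Wait for the export to complete before advancing the iterator.
+  result.join(10, TimeUnit.SECONDS);
+  if (!result.isSuccess()) {
+    break; // don't call next() again, so the unexported batch is kept on disk
+  }
+}
+```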
+
Both the writing and reading processes can run in parallel and they don't overlap
because each is supposed to happen in different files. We ensure that reader and writer don't
-accidentally meet in the same file by using the configurable parameters. These parameters set non-overlapping time frames for each action to be done on a single file at a time. On top of that, there's a mechanism in
-place to avoid overlapping on edge cases where the time frames ended but the resources haven't been
-released. For that mechanism to work properly, this tool assumes that both the reading and the
-writing actions are executed within the same application process.
+accidentally meet in the same file by using the configurable parameters. These parameters set
+non-overlapping time frames for each action to be done on a single file at a time. On top of that,
+there's a mechanism in place to avoid overlap in edge cases where the time frames have ended but
+the resources haven't been released. For that mechanism to work properly, this tool assumes that
+both the reading and the writing actions are executed within the same application process.
## Component owners
diff --git a/disk-buffering/assets/reading-flow.png b/disk-buffering/assets/reading-flow.png
index 76b8de438..63750e5a3 100644
Binary files a/disk-buffering/assets/reading-flow.png and b/disk-buffering/assets/reading-flow.png differ
diff --git a/disk-buffering/assets/writing-flow.png b/disk-buffering/assets/writing-flow.png
index c6144b301..b4b21359d 100644
Binary files a/disk-buffering/assets/writing-flow.png and b/disk-buffering/assets/writing-flow.png differ
diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts
index edc8e862c..c36d73588 100644
--- a/disk-buffering/build.gradle.kts
+++ b/disk-buffering/build.gradle.kts
@@ -1,48 +1,29 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
-import ru.vyarus.gradle.plugin.animalsniffer.AnimalSniffer
plugins {
id("otel.java-conventions")
id("otel.publish-conventions")
- id("com.github.johnrengelman.shadow")
+ id("otel.animalsniffer-conventions")
+ id("com.gradleup.shadow")
id("me.champeau.jmh") version "0.7.3"
- id("ru.vyarus.animalsniffer") version "2.0.0"
- id("com.squareup.wire") version "5.3.1"
+ id("com.squareup.wire") version "5.4.0"
}
description = "Exporter implementations that store signals on disk"
otelJava.moduleName.set("io.opentelemetry.contrib.exporters.disk")
-java {
- sourceCompatibility = JavaVersion.VERSION_1_8
- targetCompatibility = JavaVersion.VERSION_1_8
-}
-
val protos by configurations.creating
dependencies {
api("io.opentelemetry:opentelemetry-sdk")
+ implementation("io.opentelemetry:opentelemetry-api-incubator")
+ implementation("io.opentelemetry:opentelemetry-exporter-otlp-common")
compileOnly("com.google.auto.value:auto-value-annotations")
annotationProcessor("com.google.auto.value:auto-value")
- signature("com.toasttab.android:gummy-bears-api-21:0.6.1:coreLib@signature")
testImplementation("org.mockito:mockito-inline")
testImplementation("io.opentelemetry:opentelemetry-sdk-testing")
- protos("io.opentelemetry.proto:opentelemetry-proto:1.5.0-alpha@jar")
-}
-
-animalsniffer {
- sourceSets = listOf(java.sourceSets.main.get())
-}
-
-// Always having declared output makes this task properly participate in tasks up-to-date checks
-tasks.withType {
- reports.text.required.set(true)
-}
-
-// Attaching animalsniffer check to the compilation process.
-tasks.named("classes").configure {
- finalizedBy("animalsnifferMain")
+ protos("io.opentelemetry.proto:opentelemetry-proto:1.8.0-alpha@jar")
}
jmh {
@@ -67,15 +48,17 @@ wire {
}
root(
- "opentelemetry.proto.trace.v1.TracesData",
- "opentelemetry.proto.metrics.v1.MetricsData",
- "opentelemetry.proto.logs.v1.LogsData",
+ // These are the types used by the Java SDK's OTLP exporters.
+ "opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest",
+ "opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest",
+ "opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest",
)
}
tasks.named("shadowJar") {
archiveClassifier.set("")
- configurations = emptyList() // To avoid embedding any dependencies as we only need to rename some local packages.
+ configurations =
+ emptyList() // To avoid embedding any dependencies as we only need to rename some local packages.
relocate("io.opentelemetry.proto", "io.opentelemetry.diskbuffering.proto")
mustRunAfter("jar")
}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordFromDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordFromDiskExporter.java
deleted file mode 100644
index 7b37ee361..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordFromDiskExporter.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering;
-
-import io.opentelemetry.contrib.disk.buffering.config.StorageConfiguration;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.FromDiskExporter;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.FromDiskExporterImpl;
-import io.opentelemetry.contrib.disk.buffering.internal.serialization.deserializers.SignalDeserializer;
-import io.opentelemetry.contrib.disk.buffering.internal.utils.SignalTypes;
-import io.opentelemetry.sdk.logs.data.LogRecordData;
-import io.opentelemetry.sdk.logs.export.LogRecordExporter;
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-public class LogRecordFromDiskExporter implements FromDiskExporter {
-
- private final FromDiskExporterImpl delegate;
-
- public static LogRecordFromDiskExporter create(
- LogRecordExporter exporter, StorageConfiguration config) throws IOException {
- FromDiskExporterImpl delegate =
- FromDiskExporterImpl.builder()
- .setFolderName(SignalTypes.logs.name())
- .setStorageConfiguration(config)
- .setDeserializer(SignalDeserializer.ofLogs())
- .setExportFunction(exporter::export)
- .setDebugEnabled(config.isDebugEnabled())
- .build();
- return new LogRecordFromDiskExporter(delegate);
- }
-
- private LogRecordFromDiskExporter(FromDiskExporterImpl delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException {
- return delegate.exportStoredBatch(timeout, unit);
- }
-
- @Override
- public void shutdown() throws IOException {
- delegate.shutdown();
- }
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordToDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordToDiskExporter.java
deleted file mode 100644
index 7570aed8e..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/LogRecordToDiskExporter.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering;
-
-import io.opentelemetry.contrib.disk.buffering.config.StorageConfiguration;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.ToDiskExporter;
-import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
-import io.opentelemetry.contrib.disk.buffering.internal.utils.SignalTypes;
-import io.opentelemetry.sdk.common.CompletableResultCode;
-import io.opentelemetry.sdk.logs.data.LogRecordData;
-import io.opentelemetry.sdk.logs.export.LogRecordExporter;
-import java.io.IOException;
-import java.util.Collection;
-
-/**
- * This class implements a {@link LogRecordExporter} that delegates to an instance of {@code
- * ToDiskExporter}.
- */
-public class LogRecordToDiskExporter implements LogRecordExporter {
- private final ToDiskExporter delegate;
-
- /**
- * Creates a new LogRecordToDiskExporter that will buffer LogRecordData telemetry on disk storage.
- *
- * @param delegate - The LogRecordExporter to delegate to if disk writing fails.
- * @param config - The StorageConfiguration that specifies how storage is managed.
- * @return A new LogRecordToDiskExporter instance.
- * @throws IOException if the delegate ToDiskExporter could not be created.
- */
- public static LogRecordToDiskExporter create(
- LogRecordExporter delegate, StorageConfiguration config) throws IOException {
- ToDiskExporter toDisk =
- ToDiskExporter.builder()
- .setFolderName(SignalTypes.logs.name())
- .setStorageConfiguration(config)
- .setSerializer(SignalSerializer.ofLogs())
- .setExportFunction(delegate::export)
- .build();
- return new LogRecordToDiskExporter(toDisk);
- }
-
- // Visible for testing
- LogRecordToDiskExporter(ToDiskExporter delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public CompletableResultCode export(Collection logs) {
- return delegate.export(logs);
- }
-
- @Override
- public CompletableResultCode flush() {
- return CompletableResultCode.ofSuccess();
- }
-
- @Override
- public CompletableResultCode shutdown() {
- try {
- delegate.shutdown();
- return CompletableResultCode.ofSuccess();
- } catch (IOException e) {
- return CompletableResultCode.ofFailure();
- }
- }
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricFromDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricFromDiskExporter.java
deleted file mode 100644
index bf652f8f8..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricFromDiskExporter.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering;
-
-import io.opentelemetry.contrib.disk.buffering.config.StorageConfiguration;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.FromDiskExporter;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.FromDiskExporterImpl;
-import io.opentelemetry.contrib.disk.buffering.internal.serialization.deserializers.SignalDeserializer;
-import io.opentelemetry.contrib.disk.buffering.internal.utils.SignalTypes;
-import io.opentelemetry.sdk.metrics.data.MetricData;
-import io.opentelemetry.sdk.metrics.export.MetricExporter;
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-public class MetricFromDiskExporter implements FromDiskExporter {
-
- private final FromDiskExporterImpl delegate;
-
- public static MetricFromDiskExporter create(MetricExporter exporter, StorageConfiguration config)
- throws IOException {
- FromDiskExporterImpl delegate =
- FromDiskExporterImpl.builder()
- .setFolderName(SignalTypes.metrics.name())
- .setStorageConfiguration(config)
- .setDeserializer(SignalDeserializer.ofMetrics())
- .setExportFunction(exporter::export)
- .setDebugEnabled(config.isDebugEnabled())
- .build();
- return new MetricFromDiskExporter(delegate);
- }
-
- private MetricFromDiskExporter(FromDiskExporterImpl delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException {
- return delegate.exportStoredBatch(timeout, unit);
- }
-
- @Override
- public void shutdown() throws IOException {
- delegate.shutdown();
- }
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricToDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricToDiskExporter.java
deleted file mode 100644
index bf2e7066f..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/MetricToDiskExporter.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering;
-
-import io.opentelemetry.contrib.disk.buffering.config.StorageConfiguration;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.ToDiskExporter;
-import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
-import io.opentelemetry.contrib.disk.buffering.internal.utils.SignalTypes;
-import io.opentelemetry.sdk.common.CompletableResultCode;
-import io.opentelemetry.sdk.metrics.InstrumentType;
-import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
-import io.opentelemetry.sdk.metrics.data.MetricData;
-import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector;
-import io.opentelemetry.sdk.metrics.export.MetricExporter;
-import java.io.IOException;
-import java.util.Collection;
-
-/**
- * This class implements a {@link MetricExporter} that delegates to an instance of {@code
- * ToDiskExporter}.
- */
-public class MetricToDiskExporter implements MetricExporter {
-
- private final ToDiskExporter delegate;
- private final AggregationTemporalitySelector aggregationTemporalitySelector;
-
- /**
- * Creates a new MetricToDiskExporter that will buffer Metric telemetry on disk storage.
- *
- * @param delegate - The MetricExporter to delegate to if disk writing fails.
- * @param config - The StorageConfiguration that specifies how storage is managed.
- * @return A new MetricToDiskExporter instance.
- * @throws IOException if the delegate ToDiskExporter could not be created.
- */
- public static MetricToDiskExporter create(MetricExporter delegate, StorageConfiguration config)
- throws IOException {
- ToDiskExporter toDisk =
- ToDiskExporter.builder()
- .setFolderName(SignalTypes.metrics.name())
- .setStorageConfiguration(config)
- .setSerializer(SignalSerializer.ofMetrics())
- .setExportFunction(delegate::export)
- .build();
- return new MetricToDiskExporter(toDisk, delegate);
- }
-
- // VisibleForTesting
- MetricToDiskExporter(
- ToDiskExporter delegate, AggregationTemporalitySelector selector) {
- this.delegate = delegate;
- this.aggregationTemporalitySelector = selector;
- }
-
- @Override
- public CompletableResultCode export(Collection metrics) {
- return delegate.export(metrics);
- }
-
- @Override
- public CompletableResultCode flush() {
- return CompletableResultCode.ofSuccess();
- }
-
- @Override
- public CompletableResultCode shutdown() {
- try {
- delegate.shutdown();
- } catch (IOException e) {
- return CompletableResultCode.ofFailure();
- }
- return CompletableResultCode.ofSuccess();
- }
-
- @Override
- public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) {
- return aggregationTemporalitySelector.getAggregationTemporality(instrumentType);
- }
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanFromDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanFromDiskExporter.java
deleted file mode 100644
index c23ac043e..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanFromDiskExporter.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering;
-
-import io.opentelemetry.contrib.disk.buffering.config.StorageConfiguration;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.FromDiskExporter;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.FromDiskExporterImpl;
-import io.opentelemetry.contrib.disk.buffering.internal.serialization.deserializers.SignalDeserializer;
-import io.opentelemetry.contrib.disk.buffering.internal.utils.SignalTypes;
-import io.opentelemetry.sdk.trace.data.SpanData;
-import io.opentelemetry.sdk.trace.export.SpanExporter;
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-public class SpanFromDiskExporter implements FromDiskExporter {
-
- private final FromDiskExporterImpl delegate;
-
- public static SpanFromDiskExporter create(SpanExporter exporter, StorageConfiguration config)
- throws IOException {
- FromDiskExporterImpl delegate =
- FromDiskExporterImpl.builder()
- .setFolderName(SignalTypes.spans.name())
- .setStorageConfiguration(config)
- .setDeserializer(SignalDeserializer.ofSpans())
- .setExportFunction(exporter::export)
- .setDebugEnabled(config.isDebugEnabled())
- .build();
- return new SpanFromDiskExporter(delegate);
- }
-
- private SpanFromDiskExporter(FromDiskExporterImpl delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public boolean exportStoredBatch(long timeout, TimeUnit unit) throws IOException {
- return delegate.exportStoredBatch(timeout, unit);
- }
-
- @Override
- public void shutdown() throws IOException {
- delegate.shutdown();
- }
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanToDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanToDiskExporter.java
deleted file mode 100644
index d64a4cd71..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/SpanToDiskExporter.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering;
-
-import io.opentelemetry.contrib.disk.buffering.config.StorageConfiguration;
-import io.opentelemetry.contrib.disk.buffering.internal.exporter.ToDiskExporter;
-import io.opentelemetry.contrib.disk.buffering.internal.serialization.serializers.SignalSerializer;
-import io.opentelemetry.contrib.disk.buffering.internal.utils.SignalTypes;
-import io.opentelemetry.sdk.common.CompletableResultCode;
-import io.opentelemetry.sdk.trace.data.SpanData;
-import io.opentelemetry.sdk.trace.export.SpanExporter;
-import java.io.IOException;
-import java.util.Collection;
-
-/**
- * This class implements a SpanExporter that delegates to an instance of {@code
- * ToDiskExporter}.
- */
-public class SpanToDiskExporter implements SpanExporter {
-
- private final ToDiskExporter delegate;
-
- /**
- * Creates a new SpanToDiskExporter that will buffer Span telemetry on disk storage.
- *
- * @param delegate - The SpanExporter to delegate to if disk writing fails.
- * @param config - The StorageConfiguration that specifies how storage is managed.
- * @return A new SpanToDiskExporter instance.
- * @throws IOException if the delegate ToDiskExporter could not be created.
- */
- public static SpanToDiskExporter create(SpanExporter delegate, StorageConfiguration config)
- throws IOException {
- ToDiskExporter toDisk =
- ToDiskExporter.builder()
- .setFolderName(SignalTypes.spans.name())
- .setStorageConfiguration(config)
- .setSerializer(SignalSerializer.ofSpans())
- .setExportFunction(delegate::export)
- .build();
- return new SpanToDiskExporter(toDisk);
- }
-
- // Visible for testing
- SpanToDiskExporter(ToDiskExporter delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public CompletableResultCode export(Collection spans) {
- return delegate.export(spans);
- }
-
- @Override
- public CompletableResultCode flush() {
- return CompletableResultCode.ofSuccess();
- }
-
- @Override
- public CompletableResultCode shutdown() {
- try {
- delegate.shutdown();
- } catch (IOException e) {
- return CompletableResultCode.ofFailure();
- }
- return CompletableResultCode.ofSuccess();
- }
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/config/TemporaryFileProvider.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/config/TemporaryFileProvider.java
deleted file mode 100644
index 3cf803f9f..000000000
--- a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/config/TemporaryFileProvider.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.contrib.disk.buffering.config;
-
-import java.io.File;
-import java.io.IOException;
-
-/** Provides a temporary file needed to do the disk reading process. */
-public interface TemporaryFileProvider {
-
- /**
- * Creates a temporary file.
- *
- * @param prefix The prefix for the provided file name.
- */
- File createTemporaryFile(String prefix) throws IOException;
-}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/LogRecordToDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/LogRecordToDiskExporter.java
new file mode 100644
index 000000000..6ed7ae2b4
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/LogRecordToDiskExporter.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.exporters;
+
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import io.opentelemetry.contrib.disk.buffering.exporters.callback.ExporterCallback;
+import io.opentelemetry.contrib.disk.buffering.exporters.callback.NoopExporterCallback;
+import io.opentelemetry.contrib.disk.buffering.internal.exporters.SignalStorageExporter;
+import io.opentelemetry.contrib.disk.buffering.storage.SignalStorage;
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import io.opentelemetry.sdk.logs.data.LogRecordData;
+import io.opentelemetry.sdk.logs.export.LogRecordExporter;
+import java.time.Duration;
+import java.util.Collection;
+
+/** Exporter that stores logs to disk. */
+public final class LogRecordToDiskExporter implements LogRecordExporter {
+ private final SignalStorageExporter<LogRecordData> storageExporter;
+ private final ExporterCallback<LogRecordData> callback;
+ private static final ExporterCallback<LogRecordData> DEFAULT_CALLBACK =
+ new NoopExporterCallback<>();
+ private static final Duration DEFAULT_EXPORT_TIMEOUT = Duration.ofSeconds(10);
+
+ private LogRecordToDiskExporter(
+ SignalStorageExporter<LogRecordData> storageExporter,
+ ExporterCallback<LogRecordData> callback) {
+ this.storageExporter = storageExporter;
+ this.callback = callback;
+ }
+
+ public static Builder builder(SignalStorage.LogRecord storage) {
+ return new Builder(storage);
+ }
+
+ @Override
+ public CompletableResultCode export(Collection<LogRecordData> logs) {
+ return storageExporter.exportToStorage(logs);
+ }
+
+ @Override
+ public CompletableResultCode flush() {
+ return CompletableResultCode.ofSuccess();
+ }
+
+ @Override
+ public CompletableResultCode shutdown() {
+ callback.onShutdown();
+ return CompletableResultCode.ofSuccess();
+ }
+
+ public static final class Builder {
+ private final SignalStorage.LogRecord storage;
+ private ExporterCallback<LogRecordData> callback = DEFAULT_CALLBACK;
+ private Duration writeTimeout = DEFAULT_EXPORT_TIMEOUT;
+
+ @CanIgnoreReturnValue
+ public Builder setExporterCallback(ExporterCallback<LogRecordData> value) {
+ callback = value;
+ return this;
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setWriteTimeout(Duration value) {
+ writeTimeout = value;
+ return this;
+ }
+
+ public LogRecordToDiskExporter build() {
+ SignalStorageExporter<LogRecordData> storageExporter =
+ new SignalStorageExporter<>(storage, callback, writeTimeout);
+ return new LogRecordToDiskExporter(storageExporter, callback);
+ }
+
+ private Builder(SignalStorage.LogRecord storage) {
+ this.storage = storage;
+ }
+ }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/MetricToDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/MetricToDiskExporter.java
new file mode 100644
index 000000000..fe7a86abf
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/MetricToDiskExporter.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.exporters;
+
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import io.opentelemetry.contrib.disk.buffering.exporters.callback.ExporterCallback;
+import io.opentelemetry.contrib.disk.buffering.exporters.callback.NoopExporterCallback;
+import io.opentelemetry.contrib.disk.buffering.internal.exporters.SignalStorageExporter;
+import io.opentelemetry.contrib.disk.buffering.storage.SignalStorage;
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import io.opentelemetry.sdk.metrics.InstrumentType;
+import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
+import io.opentelemetry.sdk.metrics.data.MetricData;
+import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector;
+import io.opentelemetry.sdk.metrics.export.MetricExporter;
+import java.time.Duration;
+import java.util.Collection;
+
+/** Exporter that stores metrics to disk. */
+public final class MetricToDiskExporter implements MetricExporter {
+ private final SignalStorageExporter<MetricData> storageExporter;
+ private final AggregationTemporalitySelector aggregationTemporalitySelector;
+ private final ExporterCallback<MetricData> callback;
+ private static final ExporterCallback<MetricData> DEFAULT_CALLBACK = new NoopExporterCallback<>();
+ private static final Duration DEFAULT_EXPORT_TIMEOUT = Duration.ofSeconds(10);
+
+ private MetricToDiskExporter(
+ SignalStorageExporter<MetricData> storageExporter,
+ AggregationTemporalitySelector aggregationTemporalitySelector,
+ ExporterCallback<MetricData> callback) {
+ this.storageExporter = storageExporter;
+ this.aggregationTemporalitySelector = aggregationTemporalitySelector;
+ this.callback = callback;
+ }
+
+ public static Builder builder(SignalStorage.Metric storage) {
+ return new Builder(storage);
+ }
+
+ @Override
+ public CompletableResultCode export(Collection<MetricData> metrics) {
+ return storageExporter.exportToStorage(metrics);
+ }
+
+ @Override
+ public CompletableResultCode flush() {
+ return CompletableResultCode.ofSuccess();
+ }
+
+ @Override
+ public CompletableResultCode shutdown() {
+ callback.onShutdown();
+ return CompletableResultCode.ofSuccess();
+ }
+
+ @Override
+ public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) {
+ return aggregationTemporalitySelector.getAggregationTemporality(instrumentType);
+ }
+
+ public static final class Builder {
+ private final SignalStorage.Metric storage;
+ private AggregationTemporalitySelector aggregationTemporalitySelector =
+ AggregationTemporalitySelector.alwaysCumulative();
+ private ExporterCallback<MetricData> callback = DEFAULT_CALLBACK;
+ private Duration writeTimeout = DEFAULT_EXPORT_TIMEOUT;
+
+ @CanIgnoreReturnValue
+ public Builder setExporterCallback(ExporterCallback<MetricData> value) {
+ callback = value;
+ return this;
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setWriteTimeout(Duration value) {
+ writeTimeout = value;
+ return this;
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setAggregationTemporalitySelector(AggregationTemporalitySelector value) {
+ aggregationTemporalitySelector = value;
+ return this;
+ }
+
+ public MetricToDiskExporter build() {
+ SignalStorageExporter<MetricData> storageExporter =
+ new SignalStorageExporter<>(storage, callback, writeTimeout);
+ return new MetricToDiskExporter(storageExporter, aggregationTemporalitySelector, callback);
+ }
+
+ private Builder(SignalStorage.Metric storage) {
+ this.storage = storage;
+ }
+ }
+}
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/SpanToDiskExporter.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/SpanToDiskExporter.java
new file mode 100644
index 000000000..9558a2767
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/SpanToDiskExporter.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.exporters;
+
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import io.opentelemetry.contrib.disk.buffering.exporters.callback.ExporterCallback;
+import io.opentelemetry.contrib.disk.buffering.exporters.callback.NoopExporterCallback;
+import io.opentelemetry.contrib.disk.buffering.internal.exporters.SignalStorageExporter;
+import io.opentelemetry.contrib.disk.buffering.storage.SignalStorage;
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import io.opentelemetry.sdk.trace.data.SpanData;
+import io.opentelemetry.sdk.trace.export.SpanExporter;
+import java.time.Duration;
+import java.util.Collection;
+
+/** Exporter that stores spans on disk. */
+public final class SpanToDiskExporter implements SpanExporter {
+ private final SignalStorageExporter<SpanData> storageExporter;
+ private final ExporterCallback<SpanData> callback;
+ private static final ExporterCallback<SpanData> DEFAULT_CALLBACK = new NoopExporterCallback<>();
+ private static final Duration DEFAULT_EXPORT_TIMEOUT = Duration.ofSeconds(10);
+
+ private SpanToDiskExporter(
+ SignalStorageExporter<SpanData> storageExporter, ExporterCallback<SpanData> callback) {
+ this.storageExporter = storageExporter;
+ this.callback = callback;
+ }
+
+ public static Builder builder(SignalStorage.Span storage) {
+ return new Builder(storage);
+ }
+
+ @Override
+ public CompletableResultCode export(Collection<SpanData> spans) {
+ return storageExporter.exportToStorage(spans);
+ }
+
+ @Override
+ public CompletableResultCode flush() {
+ return CompletableResultCode.ofSuccess();
+ }
+
+ @Override
+ public CompletableResultCode shutdown() {
+ callback.onShutdown();
+ return CompletableResultCode.ofSuccess();
+ }
+
+ public static final class Builder {
+ private final SignalStorage.Span storage;
+ private ExporterCallback<SpanData> callback = DEFAULT_CALLBACK;
+ private Duration writeTimeout = DEFAULT_EXPORT_TIMEOUT;
+
+ private Builder(SignalStorage.Span storage) {
+ this.storage = storage;
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setExporterCallback(ExporterCallback<SpanData> value) {
+ callback = value;
+ return this;
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setWriteTimeout(Duration value) {
+ writeTimeout = value;
+ return this;
+ }
+
+ public SpanToDiskExporter build() {
+ SignalStorageExporter<SpanData> storageExporter =
+ new SignalStorageExporter<>(storage, callback, writeTimeout);
+ return new SpanToDiskExporter(storageExporter, callback);
+ }
+ }
+}
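
Usage sketch (editorial note, not part of this change): attaching SpanToDiskExporter to a tracer provider through a BatchSpanProcessor. `spanStorage` is an assumed, pre-built SignalStorage.Span instance.

// Illustrative only: `spanStorage` is assumed to be an existing SignalStorage.Span.
SpanExporter diskExporter = SpanToDiskExporter.builder(spanStorage).build();
// Standard SDK wiring: batch finished spans and hand them to the disk exporter.
SdkTracerProvider tracerProvider =
    SdkTracerProvider.builder()
        .addSpanProcessor(BatchSpanProcessor.builder(diskExporter).build())
        .build();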
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/callback/ExporterCallback.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/callback/ExporterCallback.java
new file mode 100644
index 000000000..9c3c816ea
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/callback/ExporterCallback.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.exporters.callback;
+
+import java.util.Collection;
+import javax.annotation.Nullable;
+
+/** Notifies about exporter and storage-related operations from within a signal-to-disk exporter. */
+public interface ExporterCallback<T> {
+ /**
+ * Called when an export-to-disk operation succeeded.
+ *
+ * @param items The items successfully stored on disk.
+ */
+ void onExportSuccess(Collection<T> items);
+
+ /**
+ * Called when an export-to-disk operation failed.
+ *
+ * @param items The items that couldn't be stored on disk.
+ * @param error Optional - provides more information about why the operation failed.
+ */
+ void onExportError(Collection<T> items, @Nullable Throwable error);
+
+ /** Called when the exporter is closed. */
+ void onShutdown();
+}
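
Example implementation (hypothetical, not part of this change): a callback that logs export outcomes via java.util.logging. Something like this could be passed to either exporter builder's setExporterCallback; the class name and logger name below are illustrative.

// Hypothetical callback: reports disk-export outcomes through JUL logging.
final class LoggingExporterCallback<T> implements ExporterCallback<T> {
  private static final java.util.logging.Logger logger =
      java.util.logging.Logger.getLogger("disk-buffering");

  @Override
  public void onExportSuccess(java.util.Collection<T> items) {
    logger.fine("Stored " + items.size() + " items on disk");
  }

  @Override
  public void onExportError(java.util.Collection<T> items, Throwable error) {
    logger.log(java.util.logging.Level.WARNING, "Failed to store " + items.size() + " items", error);
  }

  @Override
  public void onShutdown() {
    logger.fine("Disk exporter shut down");
  }
}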
diff --git a/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/callback/NoopExporterCallback.java b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/callback/NoopExporterCallback.java
new file mode 100644
index 000000000..6313d1a5b
--- /dev/null
+++ b/disk-buffering/src/main/java/io/opentelemetry/contrib/disk/buffering/exporters/callback/NoopExporterCallback.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.contrib.disk.buffering.exporters.callback;
+
+import java.util.Collection;
+import javax.annotation.Nullable;
+
+public final class NoopExporterCallback<T> implements ExporterCallback<T>