Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next commit
refactor
  • Loading branch information
RulaKhaled committed Dec 2, 2025
commit c0a0f25d64141fd6e1ad0298250fd7727b1e2709
43 changes: 18 additions & 25 deletions packages/core/src/tracing/openai/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { getCurrentScope } from '../../currentScopes';
import { getClient } from '../../currentScopes';
import { captureException } from '../../exports';
import { SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '../../semanticAttributes';
import { SPAN_STATUS_ERROR } from '../../tracing';
Expand All @@ -20,12 +20,10 @@ import {
GEN_AI_SYSTEM_ATTRIBUTE,
} from '../ai/gen-ai-attributes';
import { getTruncatedJsonString } from '../ai/utils';
import { OPENAI_INTEGRATION_NAME } from './constants';
import { instrumentStream } from './streaming';
import type {
ChatCompletionChunk,
InstrumentedMethod,
OpenAiIntegration,
OpenAiOptions,
OpenAiResponse,
OpenAIStream,
Expand Down Expand Up @@ -128,18 +126,6 @@ function addRequestAttributes(span: Span, params: Record<string, unknown>): void
}
}

function getOptionsFromIntegration(): OpenAiOptions {
const scope = getCurrentScope();
const client = scope.getClient();
const integration = client?.getIntegrationByName<OpenAiIntegration>(OPENAI_INTEGRATION_NAME);
const shouldRecordInputsAndOutputs = integration ? Boolean(client?.getOptions().sendDefaultPii) : false;

return {
recordInputs: integration?.options?.recordInputs ?? shouldRecordInputsAndOutputs,
recordOutputs: integration?.options?.recordOutputs ?? shouldRecordInputsAndOutputs,
};
}

/**
* Instrument a method with Sentry spans
* Following Sentry AI Agents Manual Instrumentation conventions
Expand All @@ -149,10 +135,9 @@ function instrumentMethod<T extends unknown[], R>(
originalMethod: (...args: T) => Promise<R>,
methodPath: InstrumentedMethod,
context: unknown,
options?: OpenAiOptions,
options: OpenAiOptions,
): (...args: T) => Promise<R> {
return async function instrumentedMethod(...args: T): Promise<R> {
const finalOptions = options || getOptionsFromIntegration();
const requestAttributes = extractRequestAttributes(args, methodPath);
const model = (requestAttributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] as string) || 'unknown';
const operationName = getOperationName(methodPath);
Expand All @@ -170,16 +155,16 @@ function instrumentMethod<T extends unknown[], R>(
},
async (span: Span) => {
try {
if (finalOptions.recordInputs && args[0] && typeof args[0] === 'object') {
addRequestAttributes(span, args[0] as Record<string, unknown>);
if (options.recordInputs && params) {
addRequestAttributes(span, params);
}

Comment on lines +158 to 161
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug: Removal of runtime type checking for args[0] allows primitive values to be passed to addRequestAttributes, causing a TypeError.
Severity: CRITICAL | Confidence: High

🔍 Detailed Analysis

The addRequestAttributes function will throw a TypeError: Cannot use 'in' operator to search for 'messages' in [primitive] if params is a primitive value (e.g., number, string, boolean). This occurs because the new code removes the typeof args[0] === 'object' runtime check, allowing primitive values from args[0] to be assigned to params and passed to addRequestAttributes. The if (options.recordInputs && params) check only verifies truthiness, not type, leading to the crash when the in operator is used on a primitive. This affects both streaming and non-streaming code paths.

💡 Suggested Fix

Reinstate the runtime type check typeof args[0] === 'object' before assigning args[0] to params and passing it to addRequestAttributes to ensure params is always an object.

🤖 Prompt for AI Agent
Review the code at the location below. A potential bug has been identified by an AI
agent.
Verify if this is a real issue. If it is, propose a fix; if not, explain why it's not
valid.

Location: packages/core/src/tracing/openai/index.ts#L158-L161

Potential issue: The `addRequestAttributes` function will throw a `TypeError: Cannot use
'in' operator to search for 'messages' in [primitive]` if `params` is a primitive value
(e.g., number, string, boolean). This occurs because the new code removes the `typeof
args[0] === 'object'` runtime check, allowing primitive values from `args[0]` to be
assigned to `params` and passed to `addRequestAttributes`. The `if (options.recordInputs
&& params)` check only verifies truthiness, not type, leading to the crash when the `in`
operator is used on a primitive. This affects both streaming and non-streaming code
paths.

Did we get this right? 👍 / 👎 to inform future reviews.
Reference ID: 5159655

const result = await originalMethod.apply(context, args);

return instrumentStream(
result as OpenAIStream<ChatCompletionChunk | ResponseStreamingEvent>,
span,
finalOptions.recordOutputs ?? false,
options.recordOutputs ?? false,
) as unknown as R;
} catch (error) {
// For streaming requests that fail before stream creation, we still want to record
Expand Down Expand Up @@ -209,12 +194,12 @@ function instrumentMethod<T extends unknown[], R>(
},
async (span: Span) => {
try {
if (finalOptions.recordInputs && args[0] && typeof args[0] === 'object') {
addRequestAttributes(span, args[0] as Record<string, unknown>);
if (options.recordInputs && params) {
addRequestAttributes(span, params);
}

const result = await originalMethod.apply(context, args);
addResponseAttributes(span, result, finalOptions.recordOutputs);
addResponseAttributes(span, result, options.recordOutputs);
return result;
} catch (error) {
captureException(error, {
Expand All @@ -237,7 +222,7 @@ function instrumentMethod<T extends unknown[], R>(
/**
* Create a deep proxy for OpenAI client instrumentation
*/
function createDeepProxy<T extends object>(target: T, currentPath = '', options?: OpenAiOptions): T {
function createDeepProxy<T extends object>(target: T, currentPath = '', options: OpenAiOptions): T {
return new Proxy(target, {
get(obj: object, prop: string): unknown {
const value = (obj as Record<string, unknown>)[prop];
Expand Down Expand Up @@ -267,5 +252,13 @@ function createDeepProxy<T extends object>(target: T, currentPath = '', options?
* Can be used across Node.js, Cloudflare Workers, and Vercel Edge
*/
export function instrumentOpenAiClient<T extends object>(client: T, options?: OpenAiOptions): T {
return createDeepProxy(client, '', options);
const sendDefaultPii = Boolean(getClient()?.getOptions().sendDefaultPii);

const _options = {
recordInputs: sendDefaultPii,
recordOutputs: sendDefaultPii,
...options,
};

return createDeepProxy(client, '', _options);
}
7 changes: 3 additions & 4 deletions packages/node/src/integrations/tracing/openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,16 @@ import { defineIntegration, OPENAI_INTEGRATION_NAME } from '@sentry/core';
import { generateInstrumentOnce } from '@sentry/node-core';
import { SentryOpenAiInstrumentation } from './instrumentation';

export const instrumentOpenAi = generateInstrumentOnce(
export const instrumentOpenAi = generateInstrumentOnce<OpenAiOptions>(
OPENAI_INTEGRATION_NAME,
() => new SentryOpenAiInstrumentation({}),
options => new SentryOpenAiInstrumentation(options),
);

const _openAiIntegration = ((options: OpenAiOptions = {}) => {
return {
name: OPENAI_INTEGRATION_NAME,
options,
setupOnce() {
instrumentOpenAi();
instrumentOpenAi(options);
},
};
}) satisfies IntegrationFn;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ export interface OpenAiIntegration extends Integration {
options: OpenAiOptions;
}

type OpenAiInstrumentationOptions = InstrumentationConfig & OpenAiOptions;

/**
* Represents the patched shape of the OpenAI module export.
*/
Expand All @@ -28,23 +30,11 @@ interface PatchedModuleExports {
AzureOpenAI?: abstract new (...args: unknown[]) => OpenAiClient;
}

/**
* Determines telemetry recording settings.
*/
function determineRecordingSettings(
integrationOptions: OpenAiOptions | undefined,
defaultEnabled: boolean,
): { recordInputs: boolean; recordOutputs: boolean } {
const recordInputs = integrationOptions?.recordInputs ?? defaultEnabled;
const recordOutputs = integrationOptions?.recordOutputs ?? defaultEnabled;
return { recordInputs, recordOutputs };
}

/**
* Sentry OpenAI instrumentation using OpenTelemetry.
*/
export class SentryOpenAiInstrumentation extends InstrumentationBase<InstrumentationConfig> {
public constructor(config: InstrumentationConfig = {}) {
export class SentryOpenAiInstrumentation extends InstrumentationBase<OpenAiInstrumentationOptions> {
public constructor(config: OpenAiInstrumentationOptions = {}) {
super('@sentry/instrumentation-openai', SDK_VERSION, config);
}

Expand Down Expand Up @@ -75,6 +65,8 @@ export class SentryOpenAiInstrumentation extends InstrumentationBase<Instrumenta
return exports;
}

const config = this.getConfig();

const WrappedOpenAI = function (this: unknown, ...args: unknown[]) {
// Check if wrapping should be skipped (e.g., when LangChain is handling instrumentation)
if (_INTERNAL_shouldSkipAiProviderWrapping(OPENAI_INTEGRATION_NAME)) {
Expand All @@ -83,11 +75,10 @@ export class SentryOpenAiInstrumentation extends InstrumentationBase<Instrumenta

const instance = Reflect.construct(Original, args);
const client = getClient();
const integration = client?.getIntegrationByName<OpenAiIntegration>(OPENAI_INTEGRATION_NAME);
const integrationOpts = integration?.options;
const defaultPii = Boolean(client?.getOptions().sendDefaultPii);

const { recordInputs, recordOutputs } = determineRecordingSettings(integrationOpts, defaultPii);
const recordInputs = config.recordInputs ?? defaultPii;
const recordOutputs = config.recordOutputs ?? defaultPii;

return instrumentOpenAiClient(instance as OpenAiClient, {
recordInputs,
Expand Down
Loading