Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -313,49 +313,26 @@ describe('LangChain integration', () => {
'scenario-openai-before-langchain.mjs',
'instrument.mjs',
(createRunner, test) => {
test('demonstrates timing issue with duplicate spans (ESM only)', async () => {
test('suppresses provider spans inside LangChain calls but keeps direct calls', async () => {
await createRunner()
.ignore('event')
.expect({
transaction: event => {
// This test highlights the limitation: if a user creates an Anthropic client
// before importing LangChain, that client will still be instrumented and
// could cause duplicate spans when used alongside LangChain.

const spans = event.spans || [];

// First call: Direct Anthropic call made BEFORE LangChain import
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
const firstAnthropicSpan = spans.find(
const anthropicSpans = spans.filter(
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
);

// Second call: LangChain call
// This should have LangChain instrumentation (origin: 'auto.ai.langchain')
const langchainSpan = spans.find(
const langchainSpans = spans.filter(
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.langchain',
);

// Third call: Direct Anthropic call made AFTER LangChain import
// This should NOT have Anthropic instrumentation (skip works correctly)
// Count how many Anthropic spans we have - should be exactly 1
const anthropicSpans = spans.filter(
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
);

// Verify the edge case limitation:
// - First Anthropic client (created before LangChain) IS instrumented
expect(firstAnthropicSpan).toBeDefined();
expect(firstAnthropicSpan?.origin).toBe('auto.ai.anthropic');

// - LangChain call IS instrumented by LangChain
expect(langchainSpan).toBeDefined();
expect(langchainSpan?.origin).toBe('auto.ai.langchain');
// Both direct Anthropic calls (before and after LangChain import) should produce spans
// Context-scoped suppression only suppresses spans inside LangChain calls, not globally
expect(anthropicSpans).toHaveLength(2);

// - Second Anthropic client (created after LangChain) is NOT instrumented
// This demonstrates that the skip mechanism works for NEW clients
// We should only have ONE Anthropic span (the first one), not two
expect(anthropicSpans).toHaveLength(1);
// LangChain call should produce exactly one LangChain span
expect(langchainSpans).toHaveLength(1);
},
})
.start()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -347,49 +347,26 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
'scenario-openai-before-langchain.mjs',
'instrument.mjs',
(createRunner, test) => {
test('demonstrates timing issue with duplicate spans (ESM only)', async () => {
test('suppresses provider spans inside LangChain calls but keeps direct calls', async () => {
await createRunner()
.ignore('event')
.expect({
transaction: event => {
// This test highlights the limitation: if a user creates an Anthropic client
// before importing LangChain, that client will still be instrumented and
// could cause duplicate spans when used alongside LangChain.

const spans = event.spans || [];

// First call: Direct Anthropic call made BEFORE LangChain import
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
const firstAnthropicSpan = spans.find(
const anthropicSpans = spans.filter(
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
);

// Second call: LangChain call
// This should have LangChain instrumentation (origin: 'auto.ai.langchain')
const langchainSpan = spans.find(
const langchainSpans = spans.filter(
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.langchain',
);

// Third call: Direct Anthropic call made AFTER LangChain import
// This should NOT have Anthropic instrumentation (skip works correctly)
// Count how many Anthropic spans we have - should be exactly 1
const anthropicSpans = spans.filter(
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
);

// Verify the edge case limitation:
// - First Anthropic client (created before LangChain) IS instrumented
expect(firstAnthropicSpan).toBeDefined();
expect(firstAnthropicSpan?.origin).toBe('auto.ai.anthropic');

// - LangChain call IS instrumented by LangChain
expect(langchainSpan).toBeDefined();
expect(langchainSpan?.origin).toBe('auto.ai.langchain');
// Both direct Anthropic calls (before and after LangChain import) should produce spans
// Context-scoped suppression only suppresses spans inside LangChain calls, not globally
expect(anthropicSpans).toHaveLength(2);

// - Second Anthropic client (created after LangChain) is NOT instrumented
// This demonstrates that the skip mechanism works for NEW clients
// We should only have ONE Anthropic span (the first one), not two
expect(anthropicSpans).toHaveLength(1);
// LangChain call should produce exactly one LangChain span
expect(langchainSpans).toHaveLength(1);
},
})
.start()
Expand Down
7 changes: 3 additions & 4 deletions packages/core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,9 @@ export { makeOfflineTransport } from './transports/offline';
export { makeMultiplexedTransport, MULTIPLEXED_TRANSPORT_EXTRA_KEY } from './transports/multiplexed';
export { getIntegrationsToSetup, addIntegration, defineIntegration, installedIntegrations } from './integration';
export {
_INTERNAL_skipAiProviderWrapping,
_INTERNAL_shouldSkipAiProviderWrapping,
_INTERNAL_clearAiProviderSkips,
} from './utils/ai/providerSkip';
_INTERNAL_isAiProviderSpanSuppressed,
_INTERNAL_withSuppressedAiProviderSpans,
} from './tracing/ai/suppression';
export { envToBool } from './utils/envToBool';
export { applyScopeDataToEvent, mergeScopeData, getCombinedScopeData } from './utils/scopeData';
export { prepareEvent } from './utils/prepareEvent';
Expand Down
27 changes: 27 additions & 0 deletions packages/core/src/tracing/ai/suppression.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import { getCurrentScope, withScope } from '../../currentScopes';
import type { Scope } from '../../scope';

// Key set on a scope's sdkProcessingMetadata to flag that AI provider
// auto-instrumentation (OpenAI/Anthropic/Google GenAI) must not create spans
// within that scope — used by higher-level integrations to avoid duplicates.
const SUPPRESS_AI_PROVIDER_SPANS_KEY = '__SENTRY_SUPPRESS_AI_PROVIDER_SPANS__';

/**
 * Report whether AI provider span creation is suppressed for the current scope.
 *
 * Reads the suppression flag from the active scope's `sdkProcessingMetadata`;
 * provider instrumentations call this before starting a span so that calls made
 * inside a higher-level integration (e.g. LangChain) are not double-reported.
 *
 * @internal
 */
export function _INTERNAL_isAiProviderSpanSuppressed(): boolean {
  const { sdkProcessingMetadata } = getCurrentScope().getScopeData();
  return sdkProcessingMetadata[SUPPRESS_AI_PROVIDER_SPANS_KEY] === true;
}

/**
 * Run `callback` inside a forked scope whose metadata marks AI provider spans
 * as suppressed, and return the callback's result.
 *
 * Higher-level integrations (like LangChain) wrap their provider invocations
 * with this so the underlying AI provider instrumentations skip span creation
 * and no duplicate spans are produced. The flag is scope-local: once the
 * forked scope ends, suppression no longer applies.
 *
 * @internal
 */
export function _INTERNAL_withSuppressedAiProviderSpans<T>(callback: () => T): T {
  const runWithSuppression = (scope: Scope): T => {
    scope.setSDKProcessingMetadata({ [SUPPRESS_AI_PROVIDER_SPANS_KEY]: true });
    return callback();
  };
  return withScope(runWithSuppression);
}
5 changes: 5 additions & 0 deletions packages/core/src/tracing/anthropic-ai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { getClient } from '../../currentScopes';
import { captureException } from '../../exports';
import { SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '../../semanticAttributes';
import { SPAN_STATUS_ERROR } from '../../tracing';
import { _INTERNAL_isAiProviderSpanSuppressed } from '../../tracing/ai/suppression';
import { startSpan, startSpanManual } from '../../tracing/trace';
import type { Span, SpanAttributeValue } from '../../types-hoist/span';
import { handleCallbackErrors } from '../../utils/handleCallbackErrors';
Expand Down Expand Up @@ -256,6 +257,10 @@ function instrumentMethod<T extends unknown[], R>(
): (...args: T) => R | Promise<R> {
return new Proxy(originalMethod, {
apply(target, thisArg, args: T): R | Promise<R> {
if (_INTERNAL_isAiProviderSpanSuppressed()) {
return Reflect.apply(target, thisArg, args);
}

const requestAttributes = extractRequestAttributes(args, methodPath);
const model = requestAttributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] ?? 'unknown';
const operationName = getFinalOperationName(methodPath);
Expand Down
5 changes: 5 additions & 0 deletions packages/core/src/tracing/google-genai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { getClient } from '../../currentScopes';
import { captureException } from '../../exports';
import { SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '../../semanticAttributes';
import { SPAN_STATUS_ERROR } from '../../tracing';
import { _INTERNAL_isAiProviderSpanSuppressed } from '../../tracing/ai/suppression';
import { startSpan, startSpanManual } from '../../tracing/trace';
import type { Span, SpanAttributeValue } from '../../types-hoist/span';
import { handleCallbackErrors } from '../../utils/handleCallbackErrors';
Expand Down Expand Up @@ -260,6 +261,10 @@ function instrumentMethod<T extends unknown[], R>(

return new Proxy(originalMethod, {
apply(target, _, args: T): R | Promise<R> {
if (_INTERNAL_isAiProviderSpanSuppressed()) {
return Reflect.apply(target, _, args);
}

const params = args[0] as Record<string, unknown> | undefined;
const requestAttributes = extractRequestAttributes(methodPath, params, context);
const model = requestAttributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] ?? 'unknown';
Expand Down
5 changes: 5 additions & 0 deletions packages/core/src/tracing/openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import {
GEN_AI_SYSTEM_INSTRUCTIONS_ATTRIBUTE,
OPENAI_OPERATIONS,
} from '../ai/gen-ai-attributes';
import { _INTERNAL_isAiProviderSpanSuppressed } from '../ai/suppression';
import { extractSystemInstructions, getTruncatedJsonString } from '../ai/utils';
import { instrumentStream } from './streaming';
import type {
Expand Down Expand Up @@ -254,6 +255,10 @@ function instrumentMethod<T extends unknown[], R>(
options: OpenAiOptions,
): (...args: T) => Promise<R> {
return function instrumentedMethod(...args: T): Promise<R> {
if (_INTERNAL_isAiProviderSpanSuppressed()) {
return originalMethod.apply(context, args) as Promise<R>;
}

const requestAttributes = extractRequestAttributes(args, methodPath);
const model = (requestAttributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] as string) || 'unknown';
const operationName = getOperationName(methodPath);
Expand Down
1 change: 0 additions & 1 deletion packages/core/src/tracing/vercel-ai/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
/* eslint-disable max-lines */
import type { Client } from '../../client';
import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '../../semanticAttributes';
import type { Event } from '../../types-hoist/event';
Expand Down
64 changes: 0 additions & 64 deletions packages/core/src/utils/ai/providerSkip.ts

This file was deleted.

71 changes: 0 additions & 71 deletions packages/core/test/lib/utils/ai/providerSkip.test.ts

This file was deleted.

13 changes: 1 addition & 12 deletions packages/node-core/src/sdk/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,7 @@ import { trace } from '@opentelemetry/api';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import type { BasicTracerProvider } from '@opentelemetry/sdk-trace-base';
import type { DynamicSamplingContext, Scope, ServerRuntimeClientOptions, TraceContext } from '@sentry/core';
import {
_INTERNAL_clearAiProviderSkips,
_INTERNAL_flushLogsBuffer,
applySdkMetadata,
debug,
SDK_VERSION,
ServerRuntimeClient,
} from '@sentry/core';
import { _INTERNAL_flushLogsBuffer, applySdkMetadata, debug, SDK_VERSION, ServerRuntimeClient } from '@sentry/core';
import { type AsyncLocalStorageLookup, getTraceContextForScope } from '@sentry/opentelemetry';
import { isMainThread, threadId } from 'worker_threads';
import { DEBUG_BUILD } from '../debug-build';
Expand Down Expand Up @@ -157,10 +150,6 @@ export class NodeClient extends ServerRuntimeClient<NodeClientOptions> {

/** @inheritDoc */
protected _setupIntegrations(): void {
// Clear AI provider skip registrations before setting up integrations
// This ensures a clean state between different client initializations
// (e.g., when LangChain skips OpenAI in one client, but a subsequent client uses OpenAI standalone)
_INTERNAL_clearAiProviderSkips();
super._setupIntegrations();
}

Expand Down
Loading
Loading