Skip to content

Commit 7f40621

Browse files
authored
feat(cloudflare,vercel-edge): Add support for Anthropic AI instrumentation (#17571)
Adds support for manual Anthropic AI instrumentation in @sentry/cloudflare and @sentry/vercel-edge. To instrument the Anthropic AI client, wrap it with `Sentry.instrumentAnthropicAiClient` and configure the recording options: ``` import * as Sentry from '@sentry/cloudflare'; import Anthropic from '@anthropic-ai/sdk'; const anthropic = new Anthropic(); const client = Sentry.instrumentAnthropicAiClient(anthropic, { recordInputs: true, recordOutputs: true }); // use the wrapped client ```
1 parent 0305685 commit 7f40621

File tree

8 files changed

+158
-5
lines changed

8 files changed

+158
-5
lines changed
Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import * as Sentry from '@sentry/cloudflare';
2+
import type { AnthropicAiClient } from '@sentry/core';
3+
import { MockAnthropic } from './mocks';
4+
5+
interface Env {
6+
SENTRY_DSN: string;
7+
}
8+
9+
const mockClient = new MockAnthropic({
10+
apiKey: 'mock-api-key',
11+
});
12+
13+
const client: AnthropicAiClient = Sentry.instrumentAnthropicAiClient(mockClient);
14+
15+
export default Sentry.withSentry(
16+
(env: Env) => ({
17+
dsn: env.SENTRY_DSN,
18+
tracesSampleRate: 1.0,
19+
}),
20+
{
21+
async fetch(_request, _env, _ctx) {
22+
const response = await client.messages?.create({
23+
model: 'claude-3-haiku-20240307',
24+
messages: [{ role: 'user', content: 'What is the capital of France?' }],
25+
temperature: 0.7,
26+
max_tokens: 100,
27+
});
28+
29+
return new Response(JSON.stringify(response));
30+
},
31+
},
32+
);
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
import type { AnthropicAiClient, AnthropicAiResponse } from '@sentry/core';
2+
3+
export class MockAnthropic implements AnthropicAiClient {
4+
public messages: {
5+
create: (...args: unknown[]) => Promise<AnthropicAiResponse>;
6+
countTokens: (...args: unknown[]) => Promise<AnthropicAiResponse>;
7+
};
8+
public models: {
9+
list: (...args: unknown[]) => Promise<AnthropicAiResponse>;
10+
get: (...args: unknown[]) => Promise<AnthropicAiResponse>;
11+
};
12+
public completions: {
13+
create: (...args: unknown[]) => Promise<AnthropicAiResponse>;
14+
};
15+
public apiKey: string;
16+
17+
public constructor(config: { apiKey: string }) {
18+
this.apiKey = config.apiKey;
19+
20+
// Main focus: messages.create functionality
21+
this.messages = {
22+
create: async (...args: unknown[]) => {
23+
const params = args[0] as { model: string; stream?: boolean };
24+
// Simulate processing time
25+
await new Promise(resolve => setTimeout(resolve, 10));
26+
27+
if (params.model === 'error-model') {
28+
const error = new Error('Model not found');
29+
(error as unknown as { status: number }).status = 404;
30+
(error as unknown as { headers: Record<string, string> }).headers = { 'x-request-id': 'mock-request-123' };
31+
throw error;
32+
}
33+
34+
return {
35+
id: 'msg_mock123',
36+
type: 'message',
37+
role: 'assistant',
38+
model: params.model,
39+
content: [
40+
{
41+
type: 'text',
42+
text: 'Hello from Anthropic mock!',
43+
},
44+
],
45+
stop_reason: 'end_turn',
46+
stop_sequence: null,
47+
usage: {
48+
input_tokens: 10,
49+
output_tokens: 15,
50+
cache_creation_input_tokens: 0,
51+
cache_read_input_tokens: 0,
52+
},
53+
};
54+
},
55+
countTokens: async (..._args: unknown[]) => ({ id: 'mock', type: 'model', model: 'mock', input_tokens: 0 }),
56+
};
57+
58+
// Minimal implementations for required interface compliance
59+
this.models = {
60+
list: async (..._args: unknown[]) => ({ id: 'mock', type: 'model', model: 'mock' }),
61+
get: async (..._args: unknown[]) => ({ id: 'mock', type: 'model', model: 'mock' }),
62+
};
63+
64+
this.completions = {
65+
create: async (..._args: unknown[]) => ({ id: 'mock', type: 'completion', model: 'mock' }),
66+
};
67+
}
68+
}
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import { expect, it } from 'vitest';
2+
import { createRunner } from '../../../runner';
3+
4+
// These tests are not exhaustive because the instrumentation is
5+
// already tested in the node integration tests and we merely
6+
// want to test that the instrumentation does not break in our
7+
// cloudflare SDK.
8+
9+
it('traces a basic message creation request', async () => {
10+
const runner = createRunner(__dirname)
11+
.ignore('event')
12+
.expect(envelope => {
13+
const transactionEvent = envelope[1]?.[0]?.[1] as any;
14+
15+
expect(transactionEvent.transaction).toBe('GET /');
16+
expect(transactionEvent.spans).toEqual(
17+
expect.arrayContaining([
18+
expect.objectContaining({
19+
data: expect.objectContaining({
20+
'gen_ai.operation.name': 'messages',
21+
'sentry.op': 'gen_ai.messages',
22+
'sentry.origin': 'auto.ai.anthropic',
23+
'gen_ai.system': 'anthropic',
24+
'gen_ai.request.model': 'claude-3-haiku-20240307',
25+
'gen_ai.request.temperature': 0.7,
26+
'gen_ai.response.model': 'claude-3-haiku-20240307',
27+
'gen_ai.response.id': 'msg_mock123',
28+
'gen_ai.usage.input_tokens': 10,
29+
'gen_ai.usage.output_tokens': 15,
30+
}),
31+
description: 'messages claude-3-haiku-20240307',
32+
op: 'gen_ai.messages',
33+
origin: 'auto.ai.anthropic',
34+
}),
35+
]),
36+
);
37+
})
38+
.start();
39+
await runner.makeRequest('get', '/');
40+
await runner.completed();
41+
});
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"name": "worker-name",
3+
"compatibility_date": "2025-06-17",
4+
"main": "index.ts",
5+
"compatibility_flags": ["nodejs_compat"],
6+
}

packages/cloudflare/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,7 @@ export {
7070
// eslint-disable-next-line deprecation/deprecation
7171
inboundFiltersIntegration,
7272
instrumentOpenAiClient,
73+
instrumentAnthropicAiClient,
7374
eventFiltersIntegration,
7475
linkedErrorsIntegration,
7576
requestDataIntegration,

packages/core/src/index.ts

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,12 @@ export { OPENAI_INTEGRATION_NAME } from './utils/openai/constants';
131131
export { instrumentAnthropicAiClient } from './utils/anthropic-ai';
132132
export { ANTHROPIC_AI_INTEGRATION_NAME } from './utils/anthropic-ai/constants';
133133
export type { OpenAiClient, OpenAiOptions, InstrumentedMethod } from './utils/openai/types';
134-
export type { AnthropicAiClient, AnthropicAiOptions, AnthropicAiInstrumentedMethod } from './utils/anthropic-ai/types';
134+
export type {
135+
AnthropicAiClient,
136+
AnthropicAiOptions,
137+
AnthropicAiInstrumentedMethod,
138+
AnthropicAiResponse,
139+
} from './utils/anthropic-ai/types';
135140
export type { FeatureFlag } from './utils/featureFlags';
136141

137142
export {

packages/core/src/utils/anthropic-ai/index.ts

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@ import { buildMethodPath, getFinalOperationName, getSpanOperation, setTokenUsage
2727
import { ANTHROPIC_AI_INTEGRATION_NAME } from './constants';
2828
import { instrumentStream } from './streaming';
2929
import type {
30-
AnthropicAiClient,
3130
AnthropicAiInstrumentedMethod,
3231
AnthropicAiIntegration,
3332
AnthropicAiOptions,
@@ -304,7 +303,7 @@ function instrumentMethod<T extends unknown[], R>(
304303
/**
305304
* Create a deep proxy for Anthropic AI client instrumentation
306305
*/
307-
function createDeepProxy<T extends AnthropicAiClient>(target: T, currentPath = '', options?: AnthropicAiOptions): T {
306+
function createDeepProxy<T extends object>(target: T, currentPath = '', options?: AnthropicAiOptions): T {
308307
return new Proxy(target, {
309308
get(obj: object, prop: string): unknown {
310309
const value = (obj as Record<string, unknown>)[prop];
@@ -332,11 +331,11 @@ function createDeepProxy<T extends AnthropicAiClient>(target: T, currentPath = '
332331
* Instrument an Anthropic AI client with Sentry tracing
333332
* Can be used across Node.js, Cloudflare Workers, and Vercel Edge
334333
*
335-
* @template T - The type of the client that extends AnthropicAiClient
334+
* @template T - The type of the client that extends object
336335
* @param client - The Anthropic AI client to instrument
337336
* @param options - Optional configuration for recording inputs and outputs
338337
* @returns The instrumented client with the same type as the input
339338
*/
340-
export function instrumentAnthropicAiClient<T extends AnthropicAiClient>(client: T, options?: AnthropicAiOptions): T {
339+
export function instrumentAnthropicAiClient<T extends object>(client: T, options?: AnthropicAiOptions): T {
341340
return createDeepProxy(client, '', options);
342341
}

packages/vercel-edge/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,7 @@ export {
7070
// eslint-disable-next-line deprecation/deprecation
7171
inboundFiltersIntegration,
7272
instrumentOpenAiClient,
73+
instrumentAnthropicAiClient,
7374
eventFiltersIntegration,
7475
linkedErrorsIntegration,
7576
requestDataIntegration,

0 commit comments

Comments
 (0)